diff --git a/README.md b/README.md index 3c01527..cd19467 100644 --- a/README.md +++ b/README.md @@ -234,4 +234,4 @@ This project is licensed under the terms of the license included in the [LICENSE ## Contact -For any queries or suggestions, please open an issue on the GitHub repository. \ No newline at end of file +For any queries or suggestions, please open an issue on the GitHub repository. diff --git a/cmd/grabitsh/api.go b/cmd/grabitsh/api.go new file mode 100644 index 0000000..364d701 --- /dev/null +++ b/cmd/grabitsh/api.go @@ -0,0 +1,46 @@ +package grabitsh + +import ( + "os" + "path/filepath" + "regexp" +) + +type APIInfo struct { + Files []string `json:"files"` + Swagger bool `json:"swagger"` + GraphQL bool `json:"graphql"` + Endpoints []string `json:"endpoints"` + HTTPMethods []string `json:"http_methods"` +} + +func analyzeAPIStructure() APIInfo { + var apiInfo APIInfo + + apiPatterns := []string{"*api*.go", "*controller*.rb", "*views*.py", "routes/*.js", "controllers/*.js"} + for _, pattern := range apiPatterns { + files, _ := filepath.Glob(pattern) + apiInfo.Files = append(apiInfo.Files, files...) 
+ } + + apiInfo.Swagger = fileExists("swagger.json") || fileExists("swagger.yaml") + apiInfo.GraphQL = fileExists("schema.graphql") || fileExists("schema.gql") + + // Analyze API endpoints and HTTP methods + for _, file := range apiInfo.Files { + content, err := os.ReadFile(file) + if err != nil { + continue + } + + // Extract endpoints (this is a simple example, you might need more sophisticated regex for your specific use case) + endpointRegex := regexp.MustCompile(`(GET|POST|PUT|DELETE|PATCH)\s+["']([^"']+)["']`) + matches := endpointRegex.FindAllStringSubmatch(string(content), -1) + for _, match := range matches { + apiInfo.HTTPMethods = appendUnique(apiInfo.HTTPMethods, match[1]) + apiInfo.Endpoints = appendUnique(apiInfo.Endpoints, match[2]) + } + } + + return apiInfo +} diff --git a/cmd/grabitsh/architecture.go b/cmd/grabitsh/architecture.go new file mode 100644 index 0000000..fc47ebd --- /dev/null +++ b/cmd/grabitsh/architecture.go @@ -0,0 +1,19 @@ +package grabitsh + +func detectArchitecture() string { + if dirExists("services") || dirExists("microservices") { + return "Microservices" + } else if fileExists("serverless.yml") || fileExists("serverless.yaml") { + return "Serverless" + } else if dirExists("app") && dirExists("config") && dirExists("db") { + return "Monolithic (Rails-like)" + } else if fileExists("package.json") && fileExists("server.js") { + return "Monolithic (Node.js)" + } else if fileExists("pom.xml") || fileExists("build.gradle") { + return "Monolithic (Java)" + } else if fileExists("manage.py") && dirExists("apps") { + return "Monolithic (Django)" + } + + return "Undetermined" +} diff --git a/cmd/grabitsh/cicd.go b/cmd/grabitsh/cicd.go new file mode 100644 index 0000000..3437730 --- /dev/null +++ b/cmd/grabitsh/cicd.go @@ -0,0 +1,93 @@ +package grabitsh + +import ( + "fmt" + "os" + "path/filepath" + "strings" +) + +type Step struct { + Name string `json:"name"` + Description string `json:"description"` +} + +type CICDSystem struct { + 
Name string `json:"name"` + File string `json:"file"` + Steps []Step `json:"steps"` +} + +func analyzeCICDWorkflows() ([]CICDSystem, error) { + cicdSystems := []struct { + name string + files []string + }{ + {"GitHub Actions", []string{".github/workflows/*.yml", ".github/workflows/*.yaml"}}, + {"GitLab CI", []string{".gitlab-ci.yml"}}, + {"Jenkins", []string{"Jenkinsfile"}}, + {"CircleCI", []string{".circleci/config.yml"}}, + {"Travis CI", []string{".travis.yml"}}, + {"Azure Pipelines", []string{"azure-pipelines.yml"}}, + {"Bitbucket Pipelines", []string{"bitbucket-pipelines.yml"}}, + {"AWS CodeBuild", []string{"buildspec.yml"}}, + {"Drone CI", []string{".drone.yml"}}, + {"Semaphore", []string{".semaphore/semaphore.yml"}}, + } + + var results []CICDSystem + + for _, system := range cicdSystems { + for _, filePattern := range system.files { + files, err := filepath.Glob(filePattern) + if err != nil { + return nil, fmt.Errorf("error globbing files: %w", err) + } + for _, file := range files { + content, err := os.ReadFile(file) + if err != nil { + return nil, fmt.Errorf("error reading file %s: %w", file, err) + } + steps, err := analyzeCICDSteps(string(content)) + if err != nil { + return nil, fmt.Errorf("error analyzing steps in file %s: %w", file, err) + } + results = append(results, CICDSystem{ + Name: system.name, + File: filepath.Base(file), + Steps: steps, + }) + } + } + } + + return results, nil +} + +func analyzeCICDSteps(content string) ([]Step, error) { + var steps []Step + + if strings.Contains(content, "npm test") || strings.Contains(content, "yarn test") { + steps = append(steps, Step{Name: "Testing", Description: "Runs tests"}) + } + if strings.Contains(content, "npm run build") || strings.Contains(content, "yarn build") { + steps = append(steps, Step{Name: "Build", Description: "Builds the project"}) + } + if strings.Contains(content, "docker build") || strings.Contains(content, "docker-compose") { + steps = append(steps, Step{Name: "Docker 
operations", Description: "Performs Docker operations"}) + } + if strings.Contains(content, "deploy") || strings.Contains(content, "kubectl") { + steps = append(steps, Step{Name: "Deployment", Description: "Deploys the project"}) + } + if strings.Contains(content, "lint") || strings.Contains(content, "eslint") { + steps = append(steps, Step{Name: "Linting", Description: "Runs linter"}) + } + if strings.Contains(content, "security") || strings.Contains(content, "scan") { + steps = append(steps, Step{Name: "Security scanning", Description: "Performs security scanning"}) + } + if strings.Contains(content, "coverage") || strings.Contains(content, "codecov") { + steps = append(steps, Step{Name: "Code coverage", Description: "Checks code coverage"}) + } + + return steps, nil +} diff --git a/cmd/grabitsh/code_quality.go b/cmd/grabitsh/code_quality.go new file mode 100644 index 0000000..f598670 --- /dev/null +++ b/cmd/grabitsh/code_quality.go @@ -0,0 +1,33 @@ +package grabitsh + +func analyzeCodeQuality() []string { + var tools []string + + lintConfigs := map[string]string{ + ".eslintrc": "ESLint", + ".rubocop.yml": "RuboCop", + ".golangci.yml": "golangci-lint", + "pylintrc": "Pylint", + ".checkstyle": "Checkstyle (Java)", + "tslint.json": "TSLint", + ".stylelintrc": "Stylelint", + ".prettierrc": "Prettier", + ".scalafmt.conf": "Scalafmt", + } + + for config, tool := range lintConfigs { + if fileExists(config) { + tools = append(tools, tool) + } + } + + if fileExists("sonar-project.properties") { + tools = append(tools, "SonarQube") + } + + if fileExists(".codeclimate.yml") { + tools = append(tools, "CodeClimate") + } + + return tools +} diff --git a/cmd/grabitsh/database.go b/cmd/grabitsh/database.go new file mode 100644 index 0000000..69b523e --- /dev/null +++ b/cmd/grabitsh/database.go @@ -0,0 +1,88 @@ +package grabitsh + +import ( + "os" + "path/filepath" + "strings" +) + +type DatabaseInfo struct { + MigrationsPresent bool `json:"migrations_present"` + ConfigFiles 
[]string `json:"config_files"` + ORMUsed bool `json:"orm_used"` + DatabaseTypes []string `json:"database_types"` +} + +func analyzeDatabaseUsage() DatabaseInfo { + var dbInfo DatabaseInfo + + dbInfo.MigrationsPresent = dirExists("migrations") || dirExists("db/migrate") + + dbConfigFiles := []string{ + "config/database.yml", + "knexfile.js", + "ormconfig.json", + "sequelize.config.js", + "database.json", + "dbconfig.json", + "mongo.config.js", + "redis.config.js", + } + + for _, file := range dbConfigFiles { + if fileExists(file) { + dbInfo.ConfigFiles = append(dbInfo.ConfigFiles, file) + } + } + + ormFiles := []string{ + "models.py", + "*.model.ts", + "*.rb", + "entity/*.go", + "*.entity.ts", + "models/*.java", + "entities/*.cs", + } + + for _, pattern := range ormFiles { + files, _ := filepath.Glob(pattern) + if len(files) > 0 { + dbInfo.ORMUsed = true + break + } + } + + // Detect database types + dbTypes := map[string][]string{ + "PostgreSQL": {"postgres", "postgresql", "pg"}, + "MySQL": {"mysql", "mariadb"}, + "SQLite": {"sqlite", "sqlite3"}, + "MongoDB": {"mongodb", "mongo", "mongoose"}, + "Redis": {"redis", "rediss"}, + "Cassandra": {"cassandra", "cql"}, + "Oracle": {"oracle", "orcl"}, + "SQL Server": {"sqlserver", "mssql"}, + "DB2": {"db2", "ibm"}, + "Couchbase": {"couchbase"}, + "Firebird": {"firebird"}, + "ClickHouse": {"clickhouse"}, + } + + for dbType, keywords := range dbTypes { + for _, file := range dbInfo.ConfigFiles { + content, err := os.ReadFile(file) + if err != nil { + continue + } + for _, keyword := range keywords { + if strings.Contains(string(content), keyword) { + dbInfo.DatabaseTypes = appendUnique(dbInfo.DatabaseTypes, dbType) + break + } + } + } + } + + return dbInfo +} diff --git a/cmd/grabitsh/dependency_management.go b/cmd/grabitsh/dependency_management.go new file mode 100644 index 0000000..9bc2a2a --- /dev/null +++ b/cmd/grabitsh/dependency_management.go @@ -0,0 +1,27 @@ +package grabitsh + +func analyzeDependencyManagement() 
[]string { + var tools []string + + depManagement := map[string]string{ + "package-lock.json": "npm", + "yarn.lock": "Yarn", + "Gemfile.lock": "Bundler (Ruby)", + "poetry.lock": "Poetry (Python)", + "go.sum": "Go Modules", + "composer.lock": "Composer (PHP)", + "Pipfile.lock": "Pipenv (Python)", + "pom.xml": "Maven (Java)", + "build.gradle": "Gradle (Java)", + "requirements.txt": "pip (Python)", + "Cargo.lock": "Cargo (Rust)", + } + + for file, tool := range depManagement { + if fileExists(file) { + tools = append(tools, tool) + } + } + + return tools +} diff --git a/cmd/grabitsh/framework_versions.go b/cmd/grabitsh/framework_versions.go new file mode 100644 index 0000000..2fbbdb0 --- /dev/null +++ b/cmd/grabitsh/framework_versions.go @@ -0,0 +1,84 @@ +package grabitsh + +import ( + "os" + "regexp" + "strings" +) + +func extractFrameworkVersions() map[string]string { + versions := make(map[string]string) + + // Check for versions of various frameworks using specific files and regex patterns. 
+ checkFrameworkVersion := func(file, framework, regex string) { + if fileExists(file) { + content, _ := os.ReadFile(file) + re := regexp.MustCompile(regex) + matches := re.FindStringSubmatch(string(content)) + if len(matches) > 1 { + versions[framework] = matches[len(matches)-1] // last submatch is the version; with patterns like `(\^|~)?(\d+...)`, matches[1] would be the ^/~ prefix + } + } + } + + // Ruby/Rails framework + checkFrameworkVersion("Gemfile.lock", "Rails", `rails \((\d+\.\d+\.\d+)\)`) + + // JavaScript/Node.js frameworks + checkFrameworkVersion("package.json", "React", `"react": "(\^|~)?(\d+\.\d+\.\d+)"`) + checkFrameworkVersion("package.json", "Vue.js", `"vue": "(\^|~)?(\d+\.\d+\.\d+)"`) + checkFrameworkVersion("package.json", "Angular", `"@angular/core": "(\^|~)?(\d+\.\d+\.\d+)"`) + checkFrameworkVersion("package.json", "Express", `"express": "(\^|~)?(\d+\.\d+\.\d+)"`) + checkFrameworkVersion("package.json", "Next.js", `"next": "(\^|~)?(\d+\.\d+\.\d+)"`) + checkFrameworkVersion("package.json", "Svelte", `"svelte": "(\^|~)?(\d+\.\d+\.\d+)"`) + + // Python frameworks + checkFrameworkVersion("requirements.txt", "Django", `Django==(\d+\.\d+\.\d+)`) + checkFrameworkVersion("requirements.txt", "Flask", `Flask==(\d+\.\d+\.\d+)`) + checkFrameworkVersion("requirements.txt", "FastAPI", `fastapi==(\d+\.\d+\.\d+)`) + + // PHP frameworks + checkFrameworkVersion("composer.lock", "Laravel", `"name": "laravel/framework",\s*"version": "v?(\d+\.\d+\.\d+)"`) + checkFrameworkVersion("composer.lock", "Symfony", `"name": "symfony/symfony",\s*"version": "v?(\d+\.\d+\.\d+)"`) + checkFrameworkVersion("composer.lock", "WordPress", `"name": "wordpress/core",\s*"version": "v?(\d+\.\d+\.\d+)"`) + + // Go frameworks + checkFrameworkVersion("go.mod", "Gin", `github.com/gin-gonic/gin\s*v(\d+\.\d+\.\d+)`) + checkFrameworkVersion("go.mod", "Echo", `github.com/labstack/echo/v4\s*v(\d+\.\d+\.\d+)`) + checkFrameworkVersion("go.mod", "Fiber", `github.com/gofiber/fiber/v2\s*v(\d+\.\d+\.\d+)`) + + // Rust frameworks + checkFrameworkVersion("Cargo.toml", "Rocket", `rocket\s*=\s*"(\d+\.\d+\.\d+)"`) + 
checkFrameworkVersion("Cargo.toml", "Actix", `actix-web\s*=\s*"(\d+\.\d+\.\d+)"`) + checkFrameworkVersion("Cargo.toml", "Tide", `tide\s*=\s*"(\d+\.\d+\.\d+)"`) + + // Java frameworks + checkFrameworkVersion("pom.xml", "Spring Boot", `spring-boot-starter-parent</artifactId>\s*<version>(\d+\.\d+\.\d+)</version>`) + checkFrameworkVersion("build.gradle", "Spring Boot", `springBootVersion = '(\d+\.\d+\.\d+)'`) + + // Check for Node.js and npm versions + if fileExists("package.json") { + versions["Node.js"] = strings.TrimSpace(runCommand("node", "-v")) + versions["npm"] = strings.TrimSpace(runCommand("npm", "-v")) + } + + // Check for Python version + versions["Python"] = strings.TrimSpace(runCommand("python", "--version")) + + // Check for Go version + if fileExists("go.mod") { + versions["Go"] = strings.TrimSpace(strings.TrimPrefix(runCommand("go", "version"), "go version ")) + } + + // Check for PHP version + if fileExists("composer.lock") { + versions["PHP"] = strings.TrimSpace(runCommand("php", "-v")) + } + + // Check for Rust version + if fileExists("Cargo.toml") { + versions["Rust"] = strings.TrimSpace(runCommand("rustc", "--version")) + } + + return versions +} diff --git a/cmd/grabitsh/main_analysis.go b/cmd/grabitsh/main_analysis.go new file mode 100644 index 0000000..e95bc9a --- /dev/null +++ b/cmd/grabitsh/main_analysis.go @@ -0,0 +1,74 @@ +package grabitsh + +import ( + "bytes" + "encoding/json" + "sync" +) + +type AnalysisResult struct { + Architecture string `json:"architecture"` + FrameworkVersions map[string]string `json:"framework_versions"` + CICDSystems []CICDSystem `json:"cicd_systems"` + APIStructure APIInfo `json:"api_structure"` + DatabaseUsage DatabaseInfo `json:"database_usage"` + TestingFrameworks []string `json:"testing_frameworks"` + CodeQualityTools []string `json:"code_quality_tools"` + DependencyManagement []string `json:"dependency_management"` +} + +func PerformAdvancedAnalysis(buffer *bytes.Buffer) { + buffer.WriteString("\n### Advanced Analysis ###\n") + + var result AnalysisResult + var wg 
sync.WaitGroup + wg.Add(7) + + go func() { + defer wg.Done() + result.Architecture = detectArchitecture() + }() + + go func() { + defer wg.Done() + result.FrameworkVersions = extractFrameworkVersions() + }() + + // Fix for capturing two return values + go func() { + defer wg.Done() + cicdSystems, err := analyzeCICDWorkflows() // Capture both values + if err != nil { // Handle the error + buffer.WriteString("Error analyzing CI/CD workflows: " + err.Error() + "\n") + return + } + result.CICDSystems = cicdSystems // Assign result if no error + }() + + go func() { + defer wg.Done() + result.APIStructure = analyzeAPIStructure() + }() + + go func() { + defer wg.Done() + result.DatabaseUsage = analyzeDatabaseUsage() + }() + + go func() { + defer wg.Done() + result.TestingFrameworks = analyzeTestingFrameworks() + }() + + go func() { + defer wg.Done() + result.CodeQualityTools = analyzeCodeQuality() + result.DependencyManagement = analyzeDependencyManagement() + }() + + wg.Wait() + + // Marshal the result to JSON and write to buffer + jsonResult, _ := json.MarshalIndent(result, "", " ") + buffer.WriteString(string(jsonResult)) +} diff --git a/cmd/grabitsh/root.go b/cmd/grabitsh/root.go index 008e9d3..bcb5457 100644 --- a/cmd/grabitsh/root.go +++ b/cmd/grabitsh/root.go @@ -55,6 +55,7 @@ func runGrabit(cmd *cobra.Command, args []string) { collectSecurityAnalysis(&outputBuffer) collectPerformanceMetrics(&outputBuffer) DetectImportantFiles(&outputBuffer) + PerformAdvancedAnalysis(&outputBuffer) // Output results finalizeOutput(outputBuffer.String()) @@ -116,11 +117,18 @@ func collectProjectTypes(buffer *bytes.Buffer) { func collectTODOs(buffer *bytes.Buffer) { buffer.WriteString("\n### TODOs and FIXMEs ###\n") - todoCommand := `grep -r -n --binary-files=without-match "TODO\|FIXME" --exclude-dir={.git,node_modules,vendor} .` + + // Improved exclusion: Exclude grabitsh_chunk files and root.go itself to avoid recursive results + todoCommand := `grep -r -n 
--exclude-dir={.git,node_modules,vendor} --exclude=\*.min.js --exclude=\*.min.css --exclude=\*grabitsh_chunk_*.txt --exclude=root.go --binary-files=without-match "TODO\|FIXME" .` + + // Execute the command todos := runCommand("bash", "-c", todoCommand) - if todos == "" { + + // Handle cases where grep fails to find anything or errors out + if strings.TrimSpace(todos) == "" { buffer.WriteString("No TODOs or FIXMEs found.\n") } else { + buffer.WriteString("Found TODOs and FIXMEs:\n") buffer.WriteString(todos) } } @@ -167,15 +175,6 @@ func collectPerformanceMetrics(buffer *bytes.Buffer) { buffer.WriteString(runCommand("bash", "-c", "find . -name '*.go' -not -path './.git/*' | xargs wc -l")) } -func runCommand(name string, arg ...string) string { - cmd := exec.Command(name, arg...) - out, err := cmd.CombinedOutput() - if err != nil { - return fmt.Sprintf("Error running command %s %s: %v\n", name, strings.Join(arg, " "), err) - } - return string(out) -} - func finalizeOutput(content string) { switch outputMethod { case "stdout": @@ -211,29 +210,69 @@ func writeChunks(content string) error { chunks := splitIntoChunks(content, chunkSize) totalChunks := len(chunks) - preamble := `This is part %d of %d of the output from Grabit.sh, a tool that analyzes Git repositories. + // Define the preamble formatting function + getPreamble := func(part, total, estimatedTokens int) string { + return fmt.Sprintf(`This is part %d of %d of the output from Grabit.sh, a tool designed to gather structured data from Git repositories. -Purpose: This output provides a comprehensive analysis of a Git repository, including its structure, configuration, dependencies, and potential issues. Use this information to understand the project, identify areas for improvement, and make informed decisions about the codebase. +Purpose: This output presents raw information about the structure, configuration, dependencies, and other technical details of a Git repository. 
This data will serve as a foundation for further analysis, prompting additional questions, and preparing for the next phase of investigation. Instructions: -1. Read through the information provided in this chunk. -2. If this is not the final chunk, wait for the next one before drawing conclusions. -3. Use the information to answer questions about the repository, suggest improvements, or identify potential issues. -4. Pay attention to sections like security analysis, performance metrics, and detected project types. +1. Carefully review the information provided in this chunk. +2. If this is not the final chunk, continue gathering all chunks before asking questions or proceeding to any analysis. +3. Pay close attention to any areas that seem incomplete or may require further clarification in phase 2. +4. Identify **missing pieces** of the project and consider requesting additional data or clarification. +5. Flag any **gaps or uncertainties** for deeper investigation in subsequent phases. + +**Content of Chunk %d/%d (Estimated %d tokens):** + +`, part, total, part, total, estimatedTokens) + } + + // Define the final chunk message + getFinalChunkMessage := func(part, total int) string { + if part < total { + return fmt.Sprintf("\n**Continue to next chunk (Chunk %d/%d)**\n", part+1, total) + } + return "\n**Final chunk—no more parts to follow.**\n" + } -Content of Chunk %d/%d (Estimated %d tokens): + // Add automatic next-phase flags + addNextPhaseFlags := func(content string) string { + flags := "" + if !strings.Contains(content, "database") { + flags += "\n**Flag: Missing database configuration.**" + } + if !strings.Contains(content, "test") { + flags += "\n**Flag: No testing framework detected.**" + } + return content + flags + } -` + // Add performance summary + addPerformanceSummary := func(content string, estimatedTokens int) string { + return content + fmt.Sprintf("\n### Performance Summary ###\n- Processed %d tokens in this chunk.\n", estimatedTokens) + } for i, 
chunk := range chunks { + // Estimate the number of tokens + estimatedTokens := len(strings.Fields(chunk)) + len(chunk)/3 + + // Build the full content by combining the preamble, chunk, and additional features + fullContent := getPreamble(i+1, totalChunks, estimatedTokens) + fullContent += addNextPhaseFlags(chunk) + fullContent = addPerformanceSummary(fullContent, estimatedTokens) + fullContent += getFinalChunkMessage(i+1, totalChunks) + + // Write the chunk to file filename := fmt.Sprintf("grabitsh_chunk_%d.txt", i+1) - estimatedTokens := len(strings.Fields(chunk)) + len(chunk)/3 // Same estimation as in splitIntoChunks - fullContent := fmt.Sprintf(preamble, i+1, totalChunks, i+1, totalChunks, estimatedTokens) + chunk if err := os.WriteFile(filename, []byte(fullContent), 0644); err != nil { return fmt.Errorf("failed to write chunk %d: %v", i+1, err) } + + // Output success message color.Green("Chunk %d/%d written to %s (Estimated %d tokens)", i+1, totalChunks, filename, estimatedTokens) } + return nil } diff --git a/cmd/grabitsh/testing.go b/cmd/grabitsh/testing.go new file mode 100644 index 0000000..93e0d6d --- /dev/null +++ b/cmd/grabitsh/testing.go @@ -0,0 +1,32 @@ +package grabitsh + +import ( + "path/filepath" +) + +func analyzeTestingFrameworks() []string { + var frameworks []string + + testingFrameworks := map[string]string{ + "test": "Go testing", + "spec": "RSpec (Ruby)", + "test.js": "JavaScript testing", + "test.py": "Python testing", + "__tests__": "Jest (JavaScript)", + "pytest": "pytest (Python)", + "phpunit.xml": "PHPUnit", + "junit": "JUnit (Java)", + "test_*.py": "unittest (Python)", + "*_test.go": "Go testing", + "*.spec.ts": "Jasmine/Mocha (TypeScript)", + "test_*.rb": "Minitest (Ruby)", + } + + for pattern, framework := range testingFrameworks { + if files, _ := filepath.Glob("**/" + pattern); len(files) > 0 { + frameworks = append(frameworks, framework) + } + } + + return frameworks +} diff --git a/cmd/grabitsh/utils.go b/cmd/grabitsh/utils.go 
index 18b6683..02036be 100644 --- a/cmd/grabitsh/utils.go +++ b/cmd/grabitsh/utils.go @@ -5,6 +5,7 @@ import ( "encoding/json" "fmt" "os" + "os/exec" "path/filepath" "strings" @@ -13,19 +14,34 @@ import ( const maxContentLength = 1000 -// Utility function to check if a file exists func fileExists(filename string) bool { info, err := os.Stat(filename) return err == nil && !info.IsDir() } -// Utility function to check if a directory exists func dirExists(dirname string) bool { info, err := os.Stat(dirname) return err == nil && info.IsDir() } -// Utility function to parse basic text files +func runCommand(name string, arg ...string) string { + cmd := exec.Command(name, arg...) + out, err := cmd.CombinedOutput() + if err != nil { + return fmt.Sprintf("Error running command %s %s: %v\n", name, strings.Join(arg, " "), err) + } + return string(out) +} + +func appendUnique(slice []string, item string) []string { + for _, element := range slice { + if element == item { + return slice + } + } + return append(slice, item) +} + func parseBasicTextFile(filename string, buffer *bytes.Buffer) { fileContent, err := os.ReadFile(filename) if err != nil { @@ -36,7 +52,10 @@ func parseBasicTextFile(filename string, buffer *bytes.Buffer) { buffer.Write(fileContent) } -// Utility function to parse JSON files +type JSONData struct { + Data map[string]interface{} `json:"data"` +} + func parseJSONFile(filename string, buffer *bytes.Buffer) { fileContent, err := os.ReadFile(filename) if err != nil { @@ -44,19 +63,18 @@ func parseJSONFile(filename string, buffer *bytes.Buffer) { return } - var parsed map[string]interface{} - if err := json.Unmarshal(fileContent, &parsed); err != nil { + var jsonData JSONData + if err := json.Unmarshal(fileContent, &jsonData); err != nil { buffer.WriteString(fmt.Sprintf("Error parsing %s: %v\n", filename, err)) return } buffer.WriteString(fmt.Sprintf("\nParsed %s JSON:\n", filename)) - for key, value := range parsed { + for key, value := range jsonData.Data { 
buffer.WriteString(fmt.Sprintf(" %s: %v\n", key, value)) } } -// Utility function to parse YAML files func parseYAMLFile(filename string, buffer *bytes.Buffer) { fileContent, err := os.ReadFile(filename) if err != nil { @@ -64,19 +82,18 @@ func parseYAMLFile(filename string, buffer *bytes.Buffer) { return } - var parsed map[interface{}]interface{} + var parsed map[string]interface{} if err := yaml.Unmarshal(fileContent, &parsed); err != nil { buffer.WriteString(fmt.Sprintf("Error parsing %s: %v\n", filename, err)) return } buffer.WriteString(fmt.Sprintf("\nParsed %s YAML:\n", filename)) - for key := range parsed { - buffer.WriteString(fmt.Sprintf(" %v\n", key)) + for key, value := range parsed { + buffer.WriteString(fmt.Sprintf(" %s: %v\n", key, value)) } } -// Utility function to truncate file content if it exceeds a certain length func truncateContent(content string) string { if len(content) > maxContentLength { return content[:maxContentLength] + "...\n(content truncated)" @@ -84,7 +101,6 @@ func truncateContent(content string) string { return content } -// Utility function to sanitize .env file contents func sanitizeEnvFile(content string) string { lines := strings.Split(content, "\n") var sanitized []string @@ -99,17 +115,17 @@ func sanitizeEnvFile(content string) string { return strings.Join(sanitized, "\n") } -// Utility function to check for files with specific extensions func fileExistsWithExtensions(baseName string, extensions []string) bool { for _, ext := range extensions { - if fileExists(baseName + ext) { - return true + if strings.HasSuffix(baseName+ext, ext) { + if fileExists(baseName + ext) { + return true + } } } return false } -// Utility function to parse Git config func parseGitConfig(filename string, buffer *bytes.Buffer) { fileContent, err := os.ReadFile(filename) if err != nil { @@ -119,7 +135,6 @@ func parseGitConfig(filename string, buffer *bytes.Buffer) { buffer.WriteString(fmt.Sprintf("Git config contents:\n%s", 
truncateContent(string(fileContent)))) } -// Utility function to parse GitHub Actions workflows func parseGithubActionsWorkflows(directory string, buffer *bytes.Buffer) { err := filepath.Walk(directory, func(path string, info os.FileInfo, err error) error { if err != nil { @@ -136,7 +151,6 @@ func parseGithubActionsWorkflows(directory string, buffer *bytes.Buffer) { } } -// Utility function to parse Dockerfile func parseDockerfile(filename string, buffer *bytes.Buffer) { fileContent, err := os.ReadFile(filename) if err != nil { @@ -150,7 +164,6 @@ func parseDockerfile(filename string, buffer *bytes.Buffer) { } } -// Utility function to parse Docker directories func parseDockerDir(directory string, buffer *bytes.Buffer) { err := filepath.Walk(directory, func(path string, info os.FileInfo, err error) error { if err != nil { @@ -167,7 +180,6 @@ func parseDockerDir(directory string, buffer *bytes.Buffer) { } } -// Utility function to parse Kubernetes files func parseK8sFiles(directory string, buffer *bytes.Buffer) { err := filepath.Walk(directory, func(path string, info os.FileInfo, err error) error { if err != nil { @@ -184,7 +196,6 @@ func parseK8sFiles(directory string, buffer *bytes.Buffer) { } } -// Utility function to parse Helm chart files func parseHelmFiles(directory string, buffer *bytes.Buffer) { err := filepath.Walk(directory, func(path string, info os.FileInfo, err error) error { if err != nil { @@ -201,7 +212,6 @@ func parseHelmFiles(directory string, buffer *bytes.Buffer) { } } -// Utility function to parse directories (for cloud providers) func parseDirectoryContents(directory string, buffer *bytes.Buffer) { err := filepath.Walk(directory, func(path string, info os.FileInfo, err error) error { if err != nil { @@ -216,7 +226,6 @@ func parseDirectoryContents(directory string, buffer *bytes.Buffer) { } } -// Utility function to parse Gemfile func parseGemfile(filename string, buffer *bytes.Buffer) error { fileContent, err := os.ReadFile(filename) if err != 
nil { @@ -228,7 +237,6 @@ func parseGemfile(filename string, buffer *bytes.Buffer) error { return nil } -// Utility function to parse package.json func parsePackageJSON(filename string, buffer *bytes.Buffer) error { fileContent, err := os.ReadFile(filename) if err != nil { diff --git a/grabitsh b/grabitsh deleted file mode 100755 index 751de8e..0000000 Binary files a/grabitsh and /dev/null differ