diff --git a/README.md b/README.md index bb0df362b..63910b382 100644 --- a/README.md +++ b/README.md @@ -156,6 +156,8 @@ - [Add Builds to Indexing Configuration](#add-builds-to-indexing-configuration) - [Request Graph Scan](#request-graph-scan) - [Retrieve the Graph Scan Results](#retrieve-the-graph-scan-results) + - [Request Graph Enrich](#request-graph-enrich) + - [Retrieve the Graph Enrich Results](#retrieve-the-graph-enrich-results) - [Generate Vulnerabilities Report](#generate-vulnerabilities-report) - [Get Vulnerabilities Report Details](#get-vulnerabilities-report-details) - [Get Vulnerabilities Report Content](#get-vulnerabilities-report-content) @@ -222,7 +224,7 @@ - [Distribute Release Bundle](#distribute-release-bundle) - [Delete Release Bundle Version](#delete-release-bundle-version) - [Delete Release Bundle Version Promotion](#delete-release-bundle-version-promotion) - - [Export Release Bundle](#export-release-bundle) + - [Export Release Bundle Archive](#export-release-bundle-archive) - [Import Release Bundle](#import-release-bundle) - [Remote Delete Release Bundle](#remote-delete-release-bundle) - [Lifecycle APIs](#lifecycle-apis) @@ -453,7 +455,10 @@ params.ChecksumsCalcEnabled = false targetProps := utils.NewProperties() targetProps.AddProperty("key1", "val1") params.TargetProps = targetProps - +// When using the 'archive' option for upload, we can control the target path inside the uploaded archive using placeholders. This operation determines the TargetPathInArchive value. +TargetPathInArchive := "archive/path/" +// Size limit for files to be uploaded. 
+params.SizeLimit = &fspatterns.SizeThreshold{SizeInBytes: 10000, Condition: fspatterns.LessThan}
 totalUploaded, totalFailed, err := rtManager.UploadFiles(params)
 ```
@@ -482,7 +487,10 @@
 params.Retries = 5
 params.SplitCount = 2
 // MinSplitSize default value: 5120
 params.MinSplitSize = 7168
-
+// Optional fields to avoid AQL request
+size := int64(1000)
+params.Sha256 = "5feceb66ffc86f38d952786c6d696c79c2dbc239dd4e91b46729d73a27fb57e9"
+params.Size = &size
 totalDownloaded, totalFailed, err := rtManager.DownloadFiles(params)
 ```
@@ -2003,6 +2011,60 @@ scanId, err := xrayManager.ScanGraph(graphScanParams)
 scanResults, err := xrayManager.GetScanGraphResults(scanId)
 ```
 
+#### Request Graph Enrich
+
+```go
+graphImportParams := &XrayGraphImportParams{}
+// The SBOM to be enriched, in CycloneDX JSON format.
+graphImportParams.SBOMInput = `{
+  "bomFormat": "CycloneDX",
+  "specVersion": "1.4",
+  "serialNumber": "urn:uuid:3c94db59-0dbf-41cd-49e8-c4518ac2ef3c",
+  "version": 1,
+  "metadata": {
+    "timestamp": "2024-05-22T14:52:40Z",
+    "tools": [
+      {
+        "vendor": "JFrog Inc.",
+        "name": "Xray",
+        "version": "3.95.7"
+      }
+    ],
+    "component": {
+      "type": "container",
+      "name": "jfrog/artifactory-pro:sha256",
+      "version": "2e774ffb112bcaef62804d97e6db3dc67b9169b440838b12ba12584cba2c5251"
+    }
+  },
+  "components": [
+    {
+      "bom-ref": "pkg:Oci/jfrog%2Fartifactory-pro:sha256@2e774ffb112bcaef62804d97e6db3dc67b9169b440838b12ba12584cba2c5251",
+      "type": "application",
+      "name": "jfrog/artifactory-pro:sha256",
+      "version": "2e774ffb112bcaef62804d97e6db3dc67b9169b440838b12ba12584cba2c5251",
+      "hashes": [
+        {
+          "alg": "SHA-256",
+          "content": "2e774ffb112bcaef62804d97e6db3dc67b9169b440838b12ba12584cba2c5251"
+        }
+      ],
+      "licenses": [],
+      "purl": "pkg:Oci/jfrog%2Fartifactory-pro:sha256@2e774ffb112bcaef62804d97e6db3dc67b9169b440838b12ba12584cba2c5251"
+    }
+  ],
+  "dependencies": []
+}`
+scanId, err := 
xrayManager.ImportGraph(graphImportParams) +``` + +#### Retrieve the Graph Enrich Results + +```go +// scanId should be received from xrayManager.ImportGraph(graphImportParams) request. +enrichResults, err := xrayManager.GetImportGraphResults(scanId) +``` + + #### Generate Vulnerabilities Report ```go diff --git a/artifactory/services/delete.go b/artifactory/services/delete.go index 9ce3011ab..9519c88e4 100644 --- a/artifactory/services/delete.go +++ b/artifactory/services/delete.go @@ -210,7 +210,7 @@ func removeNotToBeDeletedDirs(specFile *utils.CommonParams, ds *DeleteService, d if err != nil { return nil, err } - bufferFiles, err := utils.FilterCandidateToBeDeleted(deleteCandidates, resultWriter, "folder") + bufferFiles, err := utils.FilterCandidateToBeDeleted(deleteCandidates, resultWriter, utils.Folder) if len(bufferFiles) > 0 { defer func() { for _, file := range bufferFiles { @@ -256,7 +256,7 @@ func getSortedArtifactsToNotDelete(specFile *utils.CommonParams, ds *DeleteServi // 1. Go sorts strings differently from Artifactory's database, when the strings include special chars, such as dashes. // 2. Artifactory sorts by database columns, so directories will be sorted differently than files, // because the path and name cols have different values. 
- sortedResults, err := utils.FilterCandidateToBeDeleted(tempResults, resultWriter, "file") + sortedResults, err := utils.FilterCandidateToBeDeleted(tempResults, resultWriter, utils.File) if err != nil { return nil, err } diff --git a/artifactory/services/download.go b/artifactory/services/download.go index 14f9c7f43..d0857ccd1 100644 --- a/artifactory/services/download.go +++ b/artifactory/services/download.go @@ -2,16 +2,16 @@ package services import ( "errors" + "github.com/jfrog/build-info-go/entities" + biutils "github.com/jfrog/build-info-go/utils" + ioutils "github.com/jfrog/gofrog/io" + "github.com/jfrog/gofrog/version" "net/http" "os" "path" "path/filepath" "sort" - - biutils "github.com/jfrog/build-info-go/utils" - "github.com/jfrog/gofrog/version" - - "github.com/jfrog/build-info-go/entities" + "strings" "github.com/jfrog/jfrog-client-go/http/httpclient" @@ -164,18 +164,30 @@ func (ds *DownloadService) prepareTasks(producer parallel.Runner, expectedChan c errorsQueue.AddError(err) } } + var reader *content.ContentReader // Create handler function for the current group. fileHandlerFunc := ds.createFileHandlerFunc(downloadParams, successCounters) - // Search items. - log.Info("Searching items to download...") - switch downloadParams.GetSpecType() { - case utils.WILDCARD: - reader, err = ds.collectFilesUsingWildcardPattern(downloadParams) - case utils.BUILD: - reader, err = utils.SearchBySpecWithBuild(downloadParams.GetFile(), ds) - case utils.AQL: - reader, err = utils.SearchBySpecWithAql(downloadParams.GetFile(), ds, utils.SYMLINK) + // Check if we can avoid using AQL to get the file's info. + avoidAql, err := isFieldsProvidedToAvoidAql(downloadParams) + // Check for search errors. + if err != nil { + log.Error(err) + errorsQueue.AddError(err) + continue + } + if avoidAql { + reader, err = createResultsItemWithoutAql(downloadParams) + } else { + // Search items using AQL and get their details (size/checksum/etc.) from Artifactory. 
+			switch downloadParams.GetSpecType() {
+			case utils.WILDCARD:
+				reader, err = utils.SearchBySpecWithPattern(downloadParams.GetFile(), ds, utils.SYMLINK)
+			case utils.BUILD:
+				reader, err = utils.SearchBySpecWithBuild(downloadParams.GetFile(), ds)
+			case utils.AQL:
+				reader, err = utils.SearchBySpecWithAql(downloadParams.GetFile(), ds, utils.SYMLINK)
+			}
 		}
 		// Check for search errors.
 		if err != nil {
@@ -197,8 +209,49 @@ func (ds *DownloadService) prepareTasks(producer parallel.Runner, expectedChan c
 	}()
 }
 
-func (ds *DownloadService) collectFilesUsingWildcardPattern(downloadParams DownloadParams) (*content.ContentReader, error) {
-	return utils.SearchBySpecWithPattern(downloadParams.GetFile(), ds, utils.SYMLINK)
+func isFieldsProvidedToAvoidAql(downloadParams DownloadParams) (bool, error) {
+	if downloadParams.Sha256 != "" && downloadParams.Size != nil {
+		// Both sha256 and size are provided, so we can avoid using AQL to get the file's info.
+		return true, nil
+	} else if downloadParams.Sha256 == "" && downloadParams.Size == nil {
+		// Both sha256 and size are missing, so we can't avoid using AQL to get the file's info.
+		return false, nil
+	}
+	// Exactly one of the two fields was provided - reject the ambiguous input.
+	return false, errors.New("both sha256 and size must be provided in order to avoid using AQL")
+}
+
+// createResultsItemWithoutAql builds the single-file search result from the provided pattern, sha256 and size, sparing the AQL round trip to Artifactory.
+func createResultsItemWithoutAql(downloadParams DownloadParams) (*content.ContentReader, error) {
+	writer, err := content.NewContentWriter(content.DefaultKey, true, false)
+	if err != nil {
+		return nil, err
+	}
+	defer ioutils.Close(writer, &err)
+	repo, path, name, err := breakFileDownloadPathToParts(downloadParams.GetPattern())
+	if err != nil {
+		return nil, err
+	}
+	resultItem := &utils.ResultItem{
+		Type: string(utils.File), Repo: repo, Path: path, Name: name,
+		Size: *downloadParams.Size, Sha256: downloadParams.Sha256,
+	}
+	writer.Write(*resultItem)
+	return content.NewContentReader(writer.GetFilePath(), writer.GetArrayKey()), nil
+}
+
+// breakFileDownloadPathToParts splits a concrete (wildcard-free) download path into its repo, folder path and file name parts.
+func breakFileDownloadPathToParts(downloadPath string) (repo, path, name string, err error) {
+	if utils.IsWildcardPattern(downloadPath) {
+		return "", "", "", errorutils.CheckErrorf("downloading without AQL is not supported for the provided wildcard pattern: " + downloadPath)
+	}
+	parts := strings.Split(downloadPath, "/")
+	// Expect at least "<repo>/<name>" with non-empty repo and file name, otherwise the item cannot be resolved without AQL.
+	if len(parts) < 2 || parts[0] == "" || parts[len(parts)-1] == "" {
+		return "", "", "", errorutils.CheckErrorf("invalid download path, expected <repo>/<path>/<name>, got: " + downloadPath)
+	}
+	repo, path, name = parts[0], strings.Join(parts[1:len(parts)-1], "/"), parts[len(parts)-1]
+	return
 }
 
 func (ds *DownloadService) produceTasks(reader *content.ContentReader, downloadParams DownloadParams, producer parallel.Runner, fileHandler fileHandlerFunc, errorsQueue *clientutils.ErrorsQueue) int {
@@ -247,7 +300,7 @@ func (ds *DownloadService) produceTasks(reader *content.ContentReader, downloadP
 			Target:  downloadParams.GetTarget(),
 			Flat:    flat,
 		}
-		if resultItem.Type != "folder" {
+		if resultItem.Type != string(utils.Folder) {
 			if len(ds.rbGpgValidationMap) != 0 {
 				// Gpg validation to the downloaded artifact
 				err = rbGpgValidate(ds.rbGpgValidationMap, downloadParams.GetBundle(), resultItem)
@@ -400,6 +453,7 @@ func (ds *DownloadService) downloadFile(downloadFileDetails *httpclient.Download
 		LocalFileName: downloadFileDetails.LocalFileName,
 		LocalPath:     downloadFileDetails.LocalPath,
ExpectedSha1: downloadFileDetails.ExpectedSha1, + ExpectedSha256: downloadFileDetails.ExpectedSha256, FileSize: downloadFileDetails.Size, SplitCount: downloadParams.SplitCount, Explode: downloadParams.Explode, @@ -509,15 +563,15 @@ func (ds *DownloadService) createFileHandlerFunc(downloadParams DownloadParams, return err } localPath, localFileName := fileutils.GetLocalPathAndFile(downloadData.Dependency.Name, downloadData.Dependency.Path, target, downloadData.Flat, placeholdersUsed) - if downloadData.Dependency.Type == "folder" { + if downloadData.Dependency.Type == string(utils.Folder) { return createDir(localPath, localFileName, logMsgPrefix) } if err = removeIfSymlink(filepath.Join(localPath, localFileName)); err != nil { return err } if downloadParams.IsSymlink() { - if isSymlink, e := ds.createSymlinkIfNeeded(ds.GetArtifactoryDetails().GetUrl(), localPath, localFileName, logMsgPrefix, downloadData, successCounters, threadId, downloadParams); isSymlink { - return e + if isSymlink, err := ds.createSymlinkIfNeeded(ds.GetArtifactoryDetails().GetUrl(), localPath, localFileName, logMsgPrefix, downloadData, successCounters, threadId, downloadParams); isSymlink { + return err } } if err = ds.downloadFileIfNeeded(downloadPath, localPath, localFileName, logMsgPrefix, downloadData, downloadParams); err != nil { @@ -592,6 +646,9 @@ type DownloadParams struct { SplitCount int PublicGpgKey string SkipChecksum bool + // Optional fields to avoid AQL request + Sha256 string + Size *int64 } func (ds *DownloadParams) IsFlat() bool { diff --git a/artifactory/services/download_test.go b/artifactory/services/download_test.go new file mode 100644 index 000000000..4f0b512c4 --- /dev/null +++ b/artifactory/services/download_test.go @@ -0,0 +1,38 @@ +package services + +import ( + "github.com/stretchr/testify/assert" + "testing" +) + +func TestBreakFileDownloadPathToParts(t *testing.T) { + testCases := []struct { + name string + downloadPath string + expectedRepo string + expectedPath 
string + expectedName string + expectError bool + }{ + {"Single level path", "repo/file.txt", "repo", "", "file.txt", false}, + {"Multi-level path", "repo/folder/subfolder/file.txt", "repo", "folder/subfolder", "file.txt", false}, + {"Root level file", "repo/", "", "", "", true}, + {"Empty path", "", "", "", "", true}, + {"Invalid path", "file.txt", "", "", "", true}, + {"Wildcard path", "repo/*.txt", "", "", "", true}, + } + + for _, tt := range testCases { + t.Run(tt.name, func(t *testing.T) { + repo, path, name, err := breakFileDownloadPathToParts(tt.downloadPath) + if tt.expectError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + assert.Equal(t, tt.expectedRepo, repo) + assert.Equal(t, tt.expectedPath, path) + assert.Equal(t, tt.expectedName, name) + }) + } +} diff --git a/artifactory/services/fspatterns/sizethreshold.go b/artifactory/services/fspatterns/sizethreshold.go new file mode 100644 index 000000000..b5be2b52f --- /dev/null +++ b/artifactory/services/fspatterns/sizethreshold.go @@ -0,0 +1,27 @@ +package fspatterns + +// ThresholdCondition represents whether the threshold is for files above or below a specified size. 
+type ThresholdCondition int
+
+const (
+	// GreaterEqualThan matches sizes that are greater than or equal to the threshold.
+	GreaterEqualThan ThresholdCondition = iota
+	// LessThan matches sizes that are strictly less than the threshold.
+	LessThan
+)
+
+type SizeThreshold struct {
+	SizeInBytes int64
+	Condition   ThresholdCondition
+}
+
+func (st SizeThreshold) IsSizeWithinThreshold(actualSizeInBytes int64) bool {
+	switch st.Condition {
+	case GreaterEqualThan:
+		return actualSizeInBytes >= st.SizeInBytes
+	case LessThan:
+		return actualSizeInBytes < st.SizeInBytes
+	default:
+		return false
+	}
+}
diff --git a/artifactory/services/fspatterns/sizethreshold_test.go b/artifactory/services/fspatterns/sizethreshold_test.go
new file mode 100644
index 000000000..776832650
--- /dev/null
+++ b/artifactory/services/fspatterns/sizethreshold_test.go
@@ -0,0 +1,53 @@
+package fspatterns
+
+import (
+	"github.com/stretchr/testify/assert"
+	"testing"
+)
+
+func TestSizeWithinLimits(t *testing.T) {
+	tests := []struct {
+		name           string
+		st             SizeThreshold
+		actualSize     int64
+		expectedResult bool
+	}{
+		{
+			name:           "Exact size as threshold and condition is GreaterEqualThan returns true",
+			st:             SizeThreshold{SizeInBytes: 100, Condition: GreaterEqualThan},
+			actualSize:     100,
+			expectedResult: true,
+		},
+		{
+			name:           "SizeInBytes above threshold and condition is GreaterEqualThan returns true",
+			st:             SizeThreshold{SizeInBytes: 100, Condition: GreaterEqualThan},
+			actualSize:     150,
+			expectedResult: true,
+		},
+		{
+			name:           "SizeInBytes below threshold and condition is GreaterEqualThan returns false",
+			st:             SizeThreshold{SizeInBytes: 100, Condition: GreaterEqualThan},
+			actualSize:     50,
+			expectedResult: false,
+		},
+		{
+			name:           "Exact size as threshold and condition is LessThan returns false",
+			st:             SizeThreshold{SizeInBytes: 100, Condition: LessThan},
+			actualSize:     100,
+			expectedResult: false,
+		},
+		{
+			name:           "SizeInBytes above threshold and condition is LessThan returns false",
+			st:             SizeThreshold{SizeInBytes: 100, Condition: LessThan},
+			actualSize:     150,
+			expectedResult: false,
+ }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := tt.st.IsSizeWithinThreshold(tt.actualSize) + assert.Equal(t, tt.expectedResult, result) + }) + } +} diff --git a/artifactory/services/fspatterns/utils.go b/artifactory/services/fspatterns/utils.go index 01427d4d0..ea2f1b7f1 100644 --- a/artifactory/services/fspatterns/utils.go +++ b/artifactory/services/fspatterns/utils.go @@ -3,6 +3,7 @@ package fspatterns import ( "bytes" "fmt" + "github.com/jfrog/jfrog-client-go/utils/log" "os" "regexp" "strings" @@ -11,26 +12,17 @@ import ( "github.com/jfrog/jfrog-client-go/utils" "github.com/jfrog/jfrog-client-go/utils/errorutils" "github.com/jfrog/jfrog-client-go/utils/io/fileutils" - "github.com/jfrog/jfrog-client-go/utils/log" ) // Return all the existing paths of the provided root path -func ListFiles(rootPath string, isRecursive, includeDirs, excludeWithRelativePath, isSymlink bool, excludePathPattern string) ([]string, error) { - var paths []string - var err error - if isRecursive { - paths, err = fileutils.ListFilesRecursiveWalkIntoDirSymlink(rootPath, !isSymlink) - } else { - paths, err = fileutils.ListFiles(rootPath, includeDirs) - } - if err != nil { - return paths, err - } - var rootFilter string - if excludeWithRelativePath { - rootFilter = rootPath - } - return filterFiles(rootFilter, paths, excludePathPattern) +func ListFiles(rootPath string, isRecursive, includeDirs, excludeWithRelativePath, preserveSymlink bool, excludePathPattern string) ([]string, error) { + return ListFilesFilterPatternAndSize(rootPath, isRecursive, includeDirs, excludeWithRelativePath, preserveSymlink, excludePathPattern, nil) +} + +// Return all the existing paths of the provided root path +func ListFilesFilterPatternAndSize(rootPath string, isRecursive, includeDirs, excludeWithRelativePath, preserveSymlink bool, excludePathPattern string, sizeThreshold *SizeThreshold) ([]string, error) { + filterFunc := filterFilesFunc(rootPath, includeDirs, 
excludeWithRelativePath, preserveSymlink, excludePathPattern, sizeThreshold) + return fileutils.ListFilesWithFilterFunc(rootPath, isRecursive, !preserveSymlink, filterFunc) } // Transform to regexp and prepare Exclude patterns to be used, exclusion patterns must be absolute paths. @@ -53,23 +45,41 @@ func PrepareExcludePathPattern(exclusions []string, patternType utils.PatternTyp return excludePathPattern } -func filterFiles(rootPath string, files []string, excludePathPattern string) (filteredFiles []string, err error) { - var excludedPath bool - for i := 0; i < len(files); i++ { - if files[i] == "." { - continue +// Returns a function that filters files according to the provided parameters +func filterFilesFunc(rootPath string, includeDirs, excludeWithRelativePath, preserveSymlink bool, excludePathPattern string, sizeThreshold *SizeThreshold) func(filePath string) (included bool, err error) { + return func(path string) (included bool, err error) { + if path == "." { + return false, nil + } + if !includeDirs { + isDir, err := fileutils.IsDirExists(path, preserveSymlink) + if err != nil || isDir { + return false, err + } } - excludedPath, err = isPathExcluded(strings.TrimPrefix(files[i], rootPath), excludePathPattern) + var isExcludedByPattern bool + isExcludedByPattern, err = isPathExcluded(path, excludePathPattern, rootPath, excludeWithRelativePath) if err != nil { - return + return false, err } - if !excludedPath { - filteredFiles = append(filteredFiles, files[i]) - } else { - log.Debug(fmt.Sprintf("The path '%s' is excluded", files[i])) + if isExcludedByPattern { + log.Debug(fmt.Sprintf("The path '%s' is excluded", path)) + return false, nil + } + + if sizeThreshold != nil { + fileInfo, err := fileutils.GetFileInfo(path, preserveSymlink) + if err != nil { + return false, errorutils.CheckError(err) + } + // Check if the file size is within the limits + if !fileInfo.IsDir() && !sizeThreshold.IsSizeWithinThreshold(fileInfo.Size()) { + log.Debug(fmt.Sprintf("The 
path '%s' is excluded", path)) + return false, nil + } } + return true, nil } - return } // Return the actual sub-paths that match the regex provided. @@ -116,9 +126,13 @@ func GetSingleFileToUpload(rootPath, targetPath string, flat bool) (utils.Artifa return utils.Artifact{LocalPath: rootPath, TargetPath: uploadPath, SymlinkTargetPath: symlinkPath}, nil } -func isPathExcluded(path string, excludePathPattern string) (excludedPath bool, err error) { +func isPathExcluded(path, excludePathPattern, rootPath string, excludeWithRelativePath bool) (excludedPath bool, err error) { if len(excludePathPattern) > 0 { + if excludeWithRelativePath { + path = strings.TrimPrefix(path, rootPath) + } excludedPath, err = regexp.MatchString(excludePathPattern, path) + err = errorutils.CheckError(err) } return } diff --git a/artifactory/services/fspatterns/utils_test.go b/artifactory/services/fspatterns/utils_test.go index fbfb1bea9..f3b6df547 100644 --- a/artifactory/services/fspatterns/utils_test.go +++ b/artifactory/services/fspatterns/utils_test.go @@ -1,6 +1,8 @@ package fspatterns import ( + "fmt" + "os" "path/filepath" "regexp" "testing" @@ -8,32 +10,45 @@ import ( "github.com/stretchr/testify/assert" ) -func TestFilterFiles(t *testing.T) { - data := []struct { - files []string +func TestFilterFilesFunc(t *testing.T) { + testCases := []struct { + file string ExcludePattern string root string - result []string + included bool }{ - {[]string{"file1", filepath.Join("dir", "file1"), "file2.zip"}, "^*.zip$", "", []string{"file1", filepath.Join("dir", "file1")}}, - {[]string{ - "file1", - "test.zip", - filepath.Join("test", "file1"), - filepath.Join("dir", "test", "should-be-filter"), - }, "(^.*test.*$)", "test", []string{"file1", "test.zip", filepath.Join("test", "file1")}}, - } - for _, d := range data { - got, err := filterFiles(d.root, d.files, d.ExcludePattern) - assert.NoError(t, err) - assert.Len(t, got, len(d.result)) - assert.Contains(t, got, d.files[0]) - assert.Contains(t, 
got, d.files[1]) + // Patterns with regex + {"file1", "^*.zip$", "", true}, + {"file2.zip", "^*.zip$", "", false}, + {"dir/file1", "^*.zip$", "", true}, + {"dir/dir2/file1.zip", "^*.zip$", "", false}, + + {"test/file1", "(^.*test.*$)", "test", true}, + {"dir/test/should-be-filter", "(^.*test.*$)", "test", false}, + {"file1", "(^.*test.*$)", "", true}, + {"file2.zip", "(^.*test.*$)", "", true}, + + // Patterns without regex (exact match) + {"file1", "file1", "", false}, + {"file2.zip", "file1", "", true}, + // No pattern + {"file1", "", "", true}, + {"file2.zip", "", "", true}, + } + + for _, tc := range testCases { + t.Run(fmt.Sprintf("File: %s, Pattern: %s, Root: %s", tc.file, tc.ExcludePattern, tc.root), func(t *testing.T) { + // Create the filter function with the mocked isPathExcluded + filterFunc := filterFilesFunc(tc.root, true, true, false, tc.ExcludePattern, nil) + excluded, err := filterFunc(tc.file) + assert.NoError(t, err) + assert.True(t, excluded == tc.included, "Expected included = %v, but got %v", tc.included, excluded) + }) } } func TestSearchPatterns(t *testing.T) { - data := []struct { + testCases := []struct { path string pattern string result []string @@ -41,7 +56,7 @@ func TestSearchPatterns(t *testing.T) { {filepath.Join("testdata", "a", "a3.zip"), "^*.zip$", []string{filepath.Join("testdata", "a", "a3")}}, {filepath.Join("testdata", "a", "a3"), "^*.zip$", []string{}}, } - for _, d := range data { + for _, d := range testCases { patternRegex, err := regexp.Compile(d.pattern) assert.NoError(t, err) @@ -51,3 +66,57 @@ func TestSearchPatterns(t *testing.T) { assert.Len(t, matches, len(d.result)) } } + +func TestFilterFilesFuncWithSizeThreshold(t *testing.T) { + rootPath := t.TempDir() + + // Create test files and directories + files := []struct { + path string + size int64 + }{ + {filepath.Join(rootPath, "file.txt"), 100}, + {filepath.Join(rootPath, "largefile.txt"), 2048}, + {filepath.Join(rootPath, "dir", "subfile.txt"), 50}, + 
{filepath.Join(rootPath, "equalfile.txt"), 1024}, + } + + for _, file := range files { + dir := filepath.Dir(file.path) + assert.NoError(t, os.MkdirAll(dir, 0755)) + f, err := os.Create(file.path) + assert.NoError(t, err) + assert.NoError(t, f.Truncate(file.size)) + assert.NoError(t, f.Close()) + } + + testCases := []struct { + name string + path string + sizeThreshold *SizeThreshold + includeDirs bool + preserveSymlink bool + expectInclude bool + }{ + {"Include file within size threshold", "file.txt", &SizeThreshold{SizeInBytes: 1024, Condition: LessThan}, true, false, true}, + {"Exclude file exceeding size threshold", "largefile.txt", &SizeThreshold{SizeInBytes: 1024, Condition: LessThan}, true, false, false}, + {"Include directory", "dir", nil, true, false, true}, + {"Include file in subdirectory within size threshold", filepath.Join("dir", "subfile.txt"), &SizeThreshold{SizeInBytes: 1024, Condition: LessThan}, true, false, true}, + {"Include file with size equal to threshold", "equalfile.txt", &SizeThreshold{SizeInBytes: 1024, Condition: GreaterEqualThan}, true, false, true}, + {"Exclude file below threshold with GreaterEqualThan", "file.txt", &SizeThreshold{SizeInBytes: 150, Condition: GreaterEqualThan}, true, false, false}, + {"Include file above threshold with GreaterEqualThan", "largefile.txt", &SizeThreshold{SizeInBytes: 150, Condition: GreaterEqualThan}, true, false, true}, + {"Exclude directory when includeDirs is false", "dir", nil, false, false, false}, + {"Include file when includeDirs is false", "file.txt", nil, false, false, true}, + {"Include root level file", "file.txt", nil, true, false, true}, + {"Include root level file with preserveSymlink true", "file.txt", nil, true, true, true}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + filterFunc := filterFilesFunc(rootPath, tc.includeDirs, false, tc.preserveSymlink, "", tc.sizeThreshold) + included, err := filterFunc(filepath.Join(rootPath, tc.path)) + assert.Equal(t, 
tc.expectInclude, included) + assert.NoError(t, err) + }) + } +} diff --git a/artifactory/services/movecopy.go b/artifactory/services/movecopy.go index 92a7f952a..ca3d3df5c 100644 --- a/artifactory/services/movecopy.go +++ b/artifactory/services/movecopy.go @@ -178,7 +178,7 @@ func (mc *MoveCopyService) createMoveCopyFileHandlerFunc(result *utils.Result) f return err } if strings.HasSuffix(destFile, "/") { - if resultItem.Type != "folder" { + if resultItem.Type != string(utils.Folder) { destFile += resultItem.Name } else { _, err = mc.createPathForMoveAction(destFile, logMsgPrefix) diff --git a/artifactory/services/upload.go b/artifactory/services/upload.go index d27f64182..a1360faea 100644 --- a/artifactory/services/upload.go +++ b/artifactory/services/upload.go @@ -8,7 +8,6 @@ import ( "net/http" "os" "path/filepath" - "regexp" "sort" "strconv" "strings" @@ -312,11 +311,12 @@ func addEscapingParenthesesForUpload(pattern, target, targetPathInArchive string func scanFilesByPattern(uploadParams UploadParams, rootPath string, progressMgr ioutils.ProgressMgr, vcsCache *clientutils.VcsCache, dataHandlerFunc UploadDataHandlerFunc) error { excludePathPattern := fspatterns.PrepareExcludePathPattern(uploadParams.Exclusions, uploadParams.GetPatternType(), uploadParams.IsRecursive()) - patternRegex, err := regexp.Compile(uploadParams.GetPattern()) - if errorutils.CheckError(err) != nil { + patternRegex, err := clientutils.GetRegExp(uploadParams.GetPattern()) + if err != nil { return err } - paths, err := fspatterns.ListFiles(rootPath, uploadParams.IsRecursive(), uploadParams.IsIncludeDirs(), false, uploadParams.IsSymlink(), excludePathPattern) + + paths, err := fspatterns.ListFilesFilterPatternAndSize(rootPath, uploadParams.IsRecursive(), uploadParams.IsIncludeDirs(), false, uploadParams.IsSymlink(), excludePathPattern, uploadParams.GetSizeLimit()) if err != nil { return err } @@ -584,7 +584,7 @@ func (us *UploadService) uploadSymlink(targetPath, logMsgPrefix string, 
httpClie if err != nil { return } - resp, body, err = utils.UploadFile("", targetPath, logMsgPrefix, &us.ArtDetails, details, httpClientsDetails, us.client, uploadParams.ChecksumsCalcEnabled, nil) + resp, body, err = utils.UploadFile("", targetPath, logMsgPrefix, &us.ArtDetails, details, httpClientsDetails, us.client, uploadParams.ChecksumsCalcEnabled, us.Progress) return } @@ -609,6 +609,9 @@ func (us *UploadService) doUpload(artifact UploadData, targetUrlWithProps, logMs } if isSuccessfulUploadStatusCode(resp.StatusCode) { checksumDeployed = true + if us.Progress != nil { + us.Progress.IncrementGeneralProgress() + } return } } @@ -715,6 +718,8 @@ type UploadParams struct { Archive string // When using the 'archive' option for upload, we can control the target path inside the uploaded archive using placeholders. This operation determines the TargetPathInArchive value. TargetPathInArchive string + // Size limit for files to be uploaded. + SizeLimit *fspatterns.SizeThreshold } func NewUploadParams() UploadParams { @@ -749,6 +754,10 @@ func (up *UploadParams) GetDebian() string { return up.Deb } +func (up *UploadParams) GetSizeLimit() *fspatterns.SizeThreshold { + return up.SizeLimit +} + type UploadData struct { Artifact clientutils.Artifact TargetProps *utils.Properties @@ -773,6 +782,9 @@ func (us *UploadService) createArtifactHandlerFunc(uploadResult *utils.Result, u if err != nil { return } + if us.Progress != nil { + us.Progress.IncrementGeneralProgress() + } uploaded = true } else { // Upload file diff --git a/artifactory/services/utils/aqlquerybuilder.go b/artifactory/services/utils/aqlquerybuilder.go index 16e180b0c..cd32e5c61 100644 --- a/artifactory/services/utils/aqlquerybuilder.go +++ b/artifactory/services/utils/aqlquerybuilder.go @@ -204,14 +204,23 @@ func encodeForBuildInfoRepository(value string) string { } func CreateAqlQueryForLatestCreated(repo, path string) string { + return createAqlQueryForLatestCreated(File, repo, path) +} + +func 
CreateAqlQueryForLatestCreatedFolder(repo, path string) string { + return createAqlQueryForLatestCreated(Folder, repo, path) +} + +func createAqlQueryForLatestCreated(itemType ResultItemType, repo, path string) string { itemsPart := `items.find({` + + `"type": "%s",` + `"repo": "%s",` + `"path": {"$match": "%s"}` + `})` + `.sort({%s})` + `.limit(1)` - return fmt.Sprintf(itemsPart, repo, path, buildSortQueryPart([]string{"created"}, "desc")) + return fmt.Sprintf(itemsPart, itemType, repo, path, buildSortQueryPart([]string{"created"}, "desc")) } func prepareSearchPattern(pattern string, repositoryExists bool) string { diff --git a/artifactory/services/utils/aqlquerybuilder_test.go b/artifactory/services/utils/aqlquerybuilder_test.go index cd108abc2..d0f12d79c 100644 --- a/artifactory/services/utils/aqlquerybuilder_test.go +++ b/artifactory/services/utils/aqlquerybuilder_test.go @@ -122,6 +122,21 @@ func assertSortBody(t *testing.T, actual, expected string) { func TestCreateAqlQueryForLatestCreated(t *testing.T) { actual := CreateAqlQueryForLatestCreated("repo", "name") expected := `items.find({` + + `"type": "` + string(File) + `",` + + `"repo": "repo",` + + `"path": {"$match": "name"}` + + `})` + + `.sort({"$desc":["created"]})` + + `.limit(1)` + if actual != expected { + t.Error("The function CreateAqlQueryForLatestCreated expected to return the string:\n'" + expected + "'.\nbut returned:\n'" + actual + "'.") + } +} + +func TestCreateAqlQueryForLatestCreatedFolder(t *testing.T) { + actual := CreateAqlQueryForLatestCreatedFolder("repo", "name") + expected := `items.find({` + + `"type": "` + string(Folder) + `",` + `"repo": "repo",` + `"path": {"$match": "name"}` + `})` + diff --git a/artifactory/services/utils/deleteutils.go b/artifactory/services/utils/deleteutils.go index 1e44cd986..8ad49b7a8 100644 --- a/artifactory/services/utils/deleteutils.go +++ b/artifactory/services/utils/deleteutils.go @@ -2,11 +2,9 @@ package utils import ( "errors" - "regexp" "strings" 
"github.com/jfrog/jfrog-client-go/utils" - "github.com/jfrog/jfrog-client-go/utils/errorutils" "github.com/jfrog/jfrog-client-go/utils/io/content" ) @@ -16,9 +14,9 @@ func WildcardToDirsPath(deletePattern, searchResult string) (string, error) { } regexpPattern := "^" + strings.ReplaceAll(deletePattern, "*", "([^/]*|.*)") - r, err := regexp.Compile(regexpPattern) + r, err := utils.GetRegExp(regexpPattern) if err != nil { - return "", errorutils.CheckError(err) + return "", err } groups := r.FindStringSubmatch(searchResult) @@ -97,14 +95,14 @@ func writeRemainCandidate(cw *content.ContentWriter, mergeResult *content.Conten } } -func FilterCandidateToBeDeleted(deleteCandidates *content.ContentReader, resultWriter *content.ContentWriter, candidateType string) ([]*content.ContentReader, error) { +func FilterCandidateToBeDeleted(deleteCandidates *content.ContentReader, resultWriter *content.ContentWriter, candidateType ResultItemType) ([]*content.ContentReader, error) { paths := make(map[string]content.SortableContentItem) pathsKeys := make([]string, 0, utils.MaxBufferSize) toBeDeleted := []*content.ContentReader{} for candidate := new(ResultItem); deleteCandidates.NextRecord(candidate) == nil; candidate = new(ResultItem) { // Save all candidates, of the requested type, to a different temp file. - if candidate.Type == candidateType { - if candidateType == "folder" && candidate.Name == "." { + if candidate.Type == string(candidateType) { + if candidateType == Folder && candidate.Name == "." 
{ continue } pathsKeys = append(pathsKeys, candidate.GetItemRelativePath()) diff --git a/artifactory/services/utils/deleteutils_test.go b/artifactory/services/utils/deleteutils_test.go index ce197ad15..c00a78938 100644 --- a/artifactory/services/utils/deleteutils_test.go +++ b/artifactory/services/utils/deleteutils_test.go @@ -96,7 +96,7 @@ func TestFilterCandidateToBeDeleted(t *testing.T) { oldMaxSize := utils.MaxBufferSize defer func() { utils.MaxBufferSize = oldMaxSize }() utils.MaxBufferSize = 3 - sortedFiles, err := FilterCandidateToBeDeleted(deleteCandidates, resultWriter, "folder") + sortedFiles, err := FilterCandidateToBeDeleted(deleteCandidates, resultWriter, Folder) assert.Len(t, sortedFiles, 3) assert.NoError(t, err) for i, val := range sortedFiles { diff --git a/artifactory/services/utils/searchutil.go b/artifactory/services/utils/searchutil.go index 4e625e9ba..37e0e61ed 100644 --- a/artifactory/services/utils/searchutil.go +++ b/artifactory/services/utils/searchutil.go @@ -36,6 +36,7 @@ const ( // Use this function when searching by build without pattern or aql. // Collect build artifacts and build dependencies separately, then merge the results into one reader. func SearchBySpecWithBuild(specFile *CommonParams, flags CommonConf) (readerContent *content.ContentReader, err error) { + log.Info("Searching items related to a build...") buildName, buildNumber, err := GetBuildNameAndNumberFromBuildIdentifier(specFile.Build, specFile.Project, flags) if err != nil { return nil, err @@ -189,6 +190,7 @@ func SearchBySpecWithPattern(specFile *CommonParams, flags CommonConf, requiredA // Use this function when running Aql with pattern func SearchBySpecWithAql(specFile *CommonParams, flags CommonConf, requiredArtifactProps RequiredArtifactProps) (reader *content.ContentReader, err error) { + log.Info("Searching for items in Artifactory...") // Execute the search according to provided aql in specFile. 
var fetchedProps *content.ContentReader query := BuildQueryFromSpecFile(specFile, requiredArtifactProps) @@ -340,6 +342,13 @@ type SearchBasedContentItem interface { GetType() string } +type ResultItemType string + +const ( + File ResultItemType = "file" + Folder ResultItemType = "folder" +) + type ResultItem struct { Repo string `json:"repo,omitempty"` Path string `json:"path,omitempty"` @@ -368,7 +377,7 @@ type Stat struct { } func (item ResultItem) GetSortKey() string { - if item.Type == "folder" { + if item.Type == string(Folder) { return appendFolderSuffix(item.GetItemRelativePath()) } return item.GetItemRelativePath() @@ -389,7 +398,7 @@ func (item ResultItem) GetItemRelativePath() string { url := item.Repo url = path.Join(url, item.Path, item.Name) - if item.Type == "folder" { + if item.Type == string(Folder) { url = appendFolderSuffix(url) } return url @@ -498,12 +507,12 @@ func FilterTopChainResults(readerRecord SearchBasedContentItem, reader *content. continue } rPath := resultItem.GetItemRelativePath() - if resultItem.GetType() == "folder" && !strings.HasSuffix(rPath, "/") { + if resultItem.GetType() == string(Folder) && !strings.HasSuffix(rPath, "/") { rPath += "/" } if prevFolder == "" || !strings.HasPrefix(rPath, prevFolder) { writer.Write(resultItem) - if resultItem.GetType() == "folder" { + if resultItem.GetType() == string(Folder) { prevFolder = rPath } } diff --git a/artifactory/services/utils/tests/xray/consts.go b/artifactory/services/utils/tests/xray/consts.go index 66146282f..e30db20a9 100644 --- a/artifactory/services/utils/tests/xray/consts.go +++ b/artifactory/services/utils/tests/xray/consts.go @@ -2,6 +2,15 @@ package xray import xrayServices "github.com/jfrog/jfrog-client-go/xray/services" +const ScanResponse = ` +{ +"scan_id": "3472b4e2-bddc-11ee-a9c9-acde48001122", + "vulnerabilities": [{ + "summary": "test", + "severity": "high" + }] +} +` const FatalErrorXrayScanResponse = ` { "errors": [{"status":-1}, {"status":500}] @@ -1418,6 
+1427,8 @@ const BuildScanResultsResponse = ` ` const xscVersionResponse = `{"xsc_version": "1.0.0"}` +const scanIdResponse = `{"scan_id": "3472b4e2-bddc-11ee-a9c9-acde48001122"}` + const XscGitInfoResponse = `{"multi_scan_id": "3472b4e2-bddc-11ee-a9c9-acde48001122"}` const XscGitInfoBadResponse = `"failed create git info request: git_repo_url field must contain value"` diff --git a/artifactory/services/utils/tests/xray/server.go b/artifactory/services/utils/tests/xray/server.go index 79a4aa724..6a38ff8c8 100644 --- a/artifactory/services/utils/tests/xray/server.go +++ b/artifactory/services/utils/tests/xray/server.go @@ -197,6 +197,28 @@ func xscGetVersionHandlerFunc(t *testing.T) func(w http.ResponseWriter, r *http. } } +func enrichGetScanId(t *testing.T) func(w http.ResponseWriter, r *http.Request) { + return func(w http.ResponseWriter, r *http.Request) { + if r.Method == http.MethodPost { + _, err := fmt.Fprint(w, scanIdResponse) + assert.NoError(t, err) + return + } + http.Error(w, "Invalid enrich get scan id request", http.StatusBadRequest) + } +} + +func enrichGetResults(t *testing.T) func(w http.ResponseWriter, r *http.Request) { + return func(w http.ResponseWriter, r *http.Request) { + if r.Method == http.MethodGet { + _, err := fmt.Fprint(w, ScanResponse) + assert.NoError(t, err) + return + } + http.Error(w, "Invalid enrich get results request", http.StatusBadRequest) + } +} + func xscGitInfoHandlerFunc(t *testing.T) func(w http.ResponseWriter, r *http.Request) { return func(w http.ResponseWriter, r *http.Request) { req, err := io.ReadAll(r.Body) @@ -227,6 +249,9 @@ func StartXrayMockServer(t *testing.T) int { handlers["/api/v1/entitlements/feature/"] = entitlementsHandler handlers["/xsc/api/v1/system/version"] = xscGetVersionHandlerFunc(t) handlers["/xsc/api/v1/gitinfo"] = xscGitInfoHandlerFunc(t) + handlers["/xray/api/v1/scan/import_xml"] = enrichGetScanId(t) + getEnrichResults := fmt.Sprintf("/xray/api/v1/scan/graph/%s", TestMultiScanId) + 
handlers[getEnrichResults] = enrichGetResults(t) handlers[fmt.Sprintf("/%s/", services.ReportsAPI)] = reportHandler handlers[fmt.Sprintf("/%s/", services.BuildScanAPI)] = buildScanHandler handlers["/"] = http.NotFound diff --git a/auth/sshlogin.go b/auth/sshlogin.go index 48ea5f0d0..de327697e 100644 --- a/auth/sshlogin.go +++ b/auth/sshlogin.go @@ -5,7 +5,6 @@ import ( "encoding/json" "io" "os" - "regexp" "strconv" "github.com/jfrog/jfrog-client-go/utils" @@ -141,9 +140,8 @@ func parseUrl(url string) (protocol, host string, port int, err error) { pattern1 := "^(.+)://(.+):([0-9].+)/$" pattern2 := "^(.+)://(.+)$" - var r *regexp.Regexp - r, err = regexp.Compile(pattern1) - if errorutils.CheckError(err) != nil { + r, err := utils.GetRegExp(pattern1) + if err != nil { return } groups := r.FindStringSubmatch(url) @@ -157,8 +155,7 @@ func parseUrl(url string) (protocol, host string, port int, err error) { return } - r, err = regexp.Compile(pattern2) - err = errorutils.CheckError(err) + r, err = utils.GetRegExp(pattern2) if err != nil { return } diff --git a/go.mod b/go.mod index ee75f9bc3..0674e0bc6 100644 --- a/go.mod +++ b/go.mod @@ -11,7 +11,8 @@ require ( github.com/gookit/color v1.5.4 github.com/jfrog/archiver/v3 v3.6.1 github.com/jfrog/build-info-go v1.9.29 - github.com/jfrog/gofrog v1.7.3 + github.com/jfrog/gofrog v1.7.4 + github.com/minio/sha256-simd v1.0.1 github.com/stretchr/testify v1.9.0 github.com/xanzy/ssh-agent v0.3.3 golang.org/x/crypto v0.25.0 @@ -38,7 +39,6 @@ require ( github.com/klauspost/compress v1.17.9 // indirect github.com/klauspost/cpuid/v2 v2.2.3 // indirect github.com/klauspost/pgzip v1.2.6 // indirect - github.com/minio/sha256-simd v1.0.1 // indirect github.com/nwaples/rardecode v1.1.3 // indirect github.com/pierrec/lz4/v4 v4.1.21 // indirect github.com/pjbgf/sha1cd v0.3.0 // indirect @@ -60,4 +60,4 @@ require ( // replace github.com/jfrog/build-info-go => github.com/eyalbe4/build-info-go v1.8.6-0.20240610015232-844595d5a4f3 -// replace 
github.com/jfrog/gofrog => github.com/jfrog/gofrog dev \ No newline at end of file +// replace github.com/jfrog/gofrog => github.com/jfrog/gofrog dev diff --git a/go.sum b/go.sum index 13da62780..f55359840 100644 --- a/go.sum +++ b/go.sum @@ -61,8 +61,8 @@ github.com/jfrog/archiver/v3 v3.6.1 h1:LOxnkw9pOn45DzCbZNFV6K0+6dCsQ0L8mR3ZcujO5 github.com/jfrog/archiver/v3 v3.6.1/go.mod h1:VgR+3WZS4N+i9FaDwLZbq+jeU4B4zctXL+gL4EMzfLw= github.com/jfrog/build-info-go v1.9.29 h1:3vJ+kbk9PpU6wjisXi9c4qISNpYkISh/NmB5mq1ZlSY= github.com/jfrog/build-info-go v1.9.29/go.mod h1:AzFJlN/yKfKuKcSBaGy5nNmKN1xzx6+XcRWAswCTLTA= -github.com/jfrog/gofrog v1.7.3 h1:34iaAZP9qY1dkjb8a0g0jn0u9/2k8RROx4hgnZNTAQw= -github.com/jfrog/gofrog v1.7.3/go.mod h1:4MH6RLH0XF96Y3PK7Cy9u8YvxN9cbe0VJHlzEfMpJDA= +github.com/jfrog/gofrog v1.7.4 h1:on4AeWef5LJUhGCigSjTS4Ez3n9l8+NiZlXH6UYp05c= +github.com/jfrog/gofrog v1.7.4/go.mod h1:jyGiCgiqSSR7k86hcUSu67XVvmvkkgWTmPsH25wI298= github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4= github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= diff --git a/http/httpclient/client.go b/http/httpclient/client.go index 687f80093..acea6ba99 100644 --- a/http/httpclient/client.go +++ b/http/httpclient/client.go @@ -3,8 +3,8 @@ package httpclient import ( "bytes" "context" + "github.com/minio/sha256-simd" "strings" - //#nosec G505 -- sha1 is supported by Artifactory. 
"crypto/sha1" "encoding/hex" @@ -466,26 +466,23 @@ func saveToFile(downloadFileDetails *DownloadFileDetails, resp *http.Response, p var reader io.Reader if progress != nil { - readerProgress := progress.NewProgressReader(resp.ContentLength, "Downloading", downloadFileDetails.RelativePath) + readerProgress := progress.NewProgressReader(resp.ContentLength, "", downloadFileDetails.RelativePath) reader = readerProgress.ActionWithProgress(resp.Body) defer progress.RemoveProgress(readerProgress.GetId()) } else { reader = resp.Body } - if len(downloadFileDetails.ExpectedSha1) > 0 && !downloadFileDetails.SkipChecksum { - //#nosec G401 -- sha1 is supported by Artifactory. - actualSha1 := sha1.New() - writer := io.MultiWriter(actualSha1, out) + expectedSha, actualSha := handleExpectedSha(downloadFileDetails.ExpectedSha1, downloadFileDetails.ExpectedSha256) + if len(expectedSha) > 0 && !downloadFileDetails.SkipChecksum { + writer := io.MultiWriter(actualSha, out) _, err = io.Copy(writer, reader) if errorutils.CheckError(err) != nil { return err } - if hex.EncodeToString(actualSha1.Sum(nil)) != downloadFileDetails.ExpectedSha1 { - err = errors.New("checksum mismatch for " + fileName + ", expected: " + downloadFileDetails.ExpectedSha1 + ", actual: " + hex.EncodeToString(actualSha1.Sum(nil))) - } + err = validateChecksum(expectedSha, actualSha, downloadFileDetails.LocalFileName) } else { _, err = io.Copy(out, reader) } @@ -513,7 +510,7 @@ func (jc *HttpClient) DownloadFileConcurrently(flags ConcurrentDownloadFlags, lo var downloadProgressId int if progress != nil { - downloadProgress := progress.NewProgressReader(flags.FileSize, "Multipart download", flags.RelativePath) + downloadProgress := progress.NewProgressReader(flags.FileSize, "", flags.RelativePath) downloadProgressId = downloadProgress.GetId() // Aborting order matters. 
mergingProgress depends on the existence of downloadingProgress defer progress.RemoveProgress(downloadProgressId) @@ -660,11 +657,9 @@ func mergeChunks(chunksPaths []string, flags ConcurrentDownloadFlags) (err error err = errors.Join(err, errorutils.CheckError(destFile.Close())) }() var writer io.Writer - var actualSha1 hash.Hash - if len(flags.ExpectedSha1) > 0 { - //#nosec G401 -- Sha1 is supported by Artifactory. - actualSha1 = sha1.New() - writer = io.MultiWriter(actualSha1, destFile) + expectedSha, actualSha := handleExpectedSha(flags.ExpectedSha1, flags.ExpectedSha256) + if len(expectedSha) > 0 { + writer = io.MultiWriter(actualSha, destFile) } else { writer = io.MultiWriter(destFile) } @@ -691,13 +686,30 @@ func mergeChunks(chunksPaths []string, flags ConcurrentDownloadFlags) (err error return err } } - if len(flags.ExpectedSha1) > 0 && !flags.SkipChecksum { - if hex.EncodeToString(actualSha1.Sum(nil)) != flags.ExpectedSha1 { - err = errorutils.CheckErrorf("checksum mismatch for " + flags.LocalFileName + ", expected: " + flags.ExpectedSha1 + ", actual: " + hex.EncodeToString(actualSha1.Sum(nil))) - } + if len(expectedSha) > 0 && !flags.SkipChecksum { + err = validateChecksum(expectedSha, actualSha, flags.LocalFileName) } return err } +func validateChecksum(expectedSha string, actualSha hash.Hash, fileName string) (err error) { + actualShaString := hex.EncodeToString(actualSha.Sum(nil)) + if actualShaString != expectedSha { + err = errorutils.CheckErrorf("checksum mismatch for " + fileName + ", expected: " + expectedSha + ", actual: " + actualShaString) + } + return +} + +func handleExpectedSha(expectedSha1, expectedSha256 string) (expectedSha string, actualSha hash.Hash) { + if len(expectedSha1) > 0 { + expectedSha = expectedSha1 + //#nosec G401 -- Sha1 is supported by Artifactory. 
+ actualSha = sha1.New() + } else if len(expectedSha256) > 0 { + expectedSha = expectedSha256 + actualSha = sha256.New() + } + return +} func (jc *HttpClient) downloadFileRange(flags ConcurrentDownloadFlags, start, end int64, currentSplit int, logMsgPrefix, chunkDownloadPath string, httpClientsDetails httputils.HttpClientDetails, progress ioutils.ProgressMgr, progressId int) (fileName string, resp *http.Response, err error) { @@ -821,14 +833,15 @@ func addUserAgentHeader(req *http.Request) { } type DownloadFileDetails struct { - FileName string `json:"FileName,omitempty"` - DownloadPath string `json:"DownloadPath,omitempty"` - RelativePath string `json:"RelativePath,omitempty"` - LocalPath string `json:"LocalPath,omitempty"` - LocalFileName string `json:"LocalFileName,omitempty"` - ExpectedSha1 string `json:"ExpectedSha1,omitempty"` - Size int64 `json:"Size,omitempty"` - SkipChecksum bool `json:"SkipChecksum,omitempty"` + FileName string `json:"FileName,omitempty"` + DownloadPath string `json:"DownloadPath,omitempty"` + RelativePath string `json:"RelativePath,omitempty"` + LocalPath string `json:"LocalPath,omitempty"` + LocalFileName string `json:"LocalFileName,omitempty"` + ExpectedSha1 string `json:"ExpectedSha1,omitempty"` + ExpectedSha256 string `json:"-"` + Size int64 `json:"Size,omitempty"` + SkipChecksum bool `json:"SkipChecksum,omitempty"` } type ConcurrentDownloadFlags struct { @@ -838,6 +851,7 @@ type ConcurrentDownloadFlags struct { LocalFileName string LocalPath string ExpectedSha1 string + ExpectedSha256 string FileSize int64 SplitCount int Explode bool diff --git a/tests/xrayenrich_test.go b/tests/xrayenrich_test.go new file mode 100644 index 000000000..2e2dcfa53 --- /dev/null +++ b/tests/xrayenrich_test.go @@ -0,0 +1,50 @@ +package tests + +import ( + "github.com/jfrog/jfrog-client-go/artifactory/services/utils/tests/xray" + "github.com/jfrog/jfrog-client-go/auth" + "github.com/jfrog/jfrog-client-go/http/jfroghttpclient" + xrayServices 
"github.com/jfrog/jfrog-client-go/xray/services" + "github.com/stretchr/testify/assert" + "strconv" + "testing" +) + +func initXrayEnrichTest(t *testing.T) (xrayServerPort int, xrayDetails auth.ServiceDetails, client *jfroghttpclient.JfrogHttpClient) { + var err error + initXrayTest(t) + xrayServerPort = xray.StartXrayMockServer(t) + xrayDetails = GetXrayDetails() + client, err = jfroghttpclient.JfrogClientBuilder(). + SetClientCertPath(xrayDetails.GetClientCertPath()). + SetClientCertKeyPath(xrayDetails.GetClientCertKeyPath()). + AppendPreRequestInterceptor(xrayDetails.RunPreRequestFunctions). + Build() + assert.NoError(t, err) + return +} + +func TestIsImportSucceeded(t *testing.T) { + xrayServerPort, xrayDetails, client := initXrayEnrichTest(t) + testsEnrichService := xrayServices.NewEnrichService(client) + testsEnrichService.XrayDetails = xrayDetails + testsEnrichService.XrayDetails.SetUrl("http://localhost:" + strconv.Itoa(xrayServerPort) + "/xray/") + + params := xrayServices.XrayGraphImportParams{SBOMInput: []byte("")} + result, err := testsEnrichService.ImportGraph(params) + assert.NoError(t, err) + assert.Equal(t, result, xray.TestMultiScanId) +} + +func TestGetImportResults(t *testing.T) { + xrayServerPort, xrayDetails, client := initXrayEnrichTest(t) + testsEnrichService := xrayServices.NewEnrichService(client) + testsEnrichService.XrayDetails = xrayDetails + testsEnrichService.XrayDetails.SetUrl("http://localhost:" + strconv.Itoa(xrayServerPort) + "/xray/") + + result, err := testsEnrichService.GetImportGraphResults(xray.TestMultiScanId) + assert.NoError(t, err) + assert.Equal(t, result.ScanId, xray.TestMultiScanId) + assert.Len(t, result.Vulnerabilities, 1) + +} diff --git a/utils/io/fileutils/files.go b/utils/io/fileutils/files.go index 119d5b8d3..d5841419c 100644 --- a/utils/io/fileutils/files.go +++ b/utils/io/fileutils/files.go @@ -164,48 +164,34 @@ func ListFilesRecursiveWalkIntoDirSymlink(path string, walkIntoDirSymlink bool) return } -// Return 
all files in the specified path who satisfy the filter func. Not recursive. -func ListFilesByFilterFunc(path string, filterFunc func(filePath string) (bool, error)) ([]string, error) { - sep := GetFileSeparator() - if !strings.HasSuffix(path, sep) { - path += sep - } - var fileList []string - files, _ := os.ReadDir(path) - path = strings.TrimPrefix(path, "."+sep) - - for _, f := range files { - filePath := path + f.Name() - satisfy, err := filterFunc(filePath) - if err != nil { - return nil, err - } - if !satisfy { - continue +// Return the recursive list of files and directories in the specified path +func ListFilesWithFilterFunc(rootPath string, isRecursive, walkIntoDirSymlink bool, filterFunc func(filePath string) (bool, error)) (fileList []string, err error) { + fileList = []string{} + err = gofrog.Walk(rootPath, func(path string, f os.FileInfo, err error) error { + if err != nil || path == rootPath { + return err } - exists, err := IsFileExists(filePath, false) + include, err := filterFunc(path) if err != nil { - return nil, err + return err } - if exists { - fileList = append(fileList, filePath) - continue + if include { + fileList = append(fileList, path) } - - // Checks if the filepath is a symlink. - if IsPathSymlink(filePath) { - // Gets the file info of the symlink. - file, err := GetFileInfo(filePath, false) - if errorutils.CheckError(err) != nil { - return nil, err + if !isRecursive { + // If the path is not in the root directory, and it's a directory we should skip it and not walk into it. + isDir, err := IsDirExists(path, false) + if err != nil { + return err } - // Checks if the symlink is a file. 
- if !file.IsDir() { - fileList = append(fileList, filePath) + if isDir { + return gofrog.ErrSkipDir } } - } - return fileList, nil + return nil + }, walkIntoDirSymlink) + err = errorutils.CheckError(err) + return } // Return the list of files and directories in the specified path @@ -384,7 +370,11 @@ func calcChecksumDetails(filePath string) (checksum entities.Checksum, err error func GetFileDetailsFromReader(reader io.Reader, includeChecksums bool) (details *FileDetails, err error) { details = new(FileDetails) - + if !includeChecksums { + // io.Copy copies from the reader to io.Discard and returns the number of bytes copied + details.Size, err = io.Copy(io.Discard, reader) + return + } pr, pw := io.Pipe() defer func() { err = errors.Join(err, errorutils.CheckError(pr.Close())) @@ -397,9 +387,7 @@ func GetFileDetailsFromReader(reader io.Reader, includeChecksums bool) (details details.Size, err = io.Copy(pw, reader) }() - if includeChecksums { - details.Checksum, err = calcChecksumDetailsFromReader(pr) - } + details.Checksum, err = calcChecksumDetailsFromReader(pr) return } @@ -551,6 +539,10 @@ func JsonEqual(filePath1, filePath2 string) (isEqual bool, err error) { // Compares provided Md5 and Sha1 to those of a local file. 
func IsEqualToLocalFile(localFilePath, md5, sha1 string) (bool, error) { + if md5 == "" || sha1 == "" { + // If not received checksums from downloaded file, no need to calculate local ones + return false, nil + } exists, err := IsFileExists(localFilePath, false) if err != nil { return false, err diff --git a/utils/io/fileutils/files_test.go b/utils/io/fileutils/files_test.go index cf27e7a51..a8e9ecb4d 100644 --- a/utils/io/fileutils/files_test.go +++ b/utils/io/fileutils/files_test.go @@ -225,7 +225,7 @@ func TestListFilesByFilterFunc(t *testing.T) { ext := strings.TrimLeft(filepath.Ext(filePath), ".") return regexp.MatchString(`.*proj$`, ext) } - files, err := ListFilesByFilterFunc(testDir, filterFunc) + files, err := ListFilesWithFilterFunc(testDir, true, false, filterFunc) if err != nil { assert.NoError(t, err) return diff --git a/utils/regexputils.go b/utils/regexputils.go index a901de6a1..c5fcfe7eb 100644 --- a/utils/regexputils.go +++ b/utils/regexputils.go @@ -11,8 +11,8 @@ const CredentialsInUrlRegexp = `(http|https|git)://.+@` func GetRegExp(regex string) (*regexp.Regexp, error) { regExp, err := regexp.Compile(regex) - if errorutils.CheckError(err) != nil { - return nil, err + if err != nil { + return nil, errorutils.CheckErrorf("failed to compile regex '%s' : %w", regex, err) } return regExp, nil } diff --git a/utils/utils.go b/utils/utils.go index 7b50eaea7..03bd62c39 100644 --- a/utils/utils.go +++ b/utils/utils.go @@ -28,7 +28,7 @@ import ( const ( Development = "development" Agent = "jfrog-client-go" - Version = "1.41.2" + Version = "1.42.0" ) type MinVersionProduct string @@ -282,8 +282,7 @@ func BuildTargetPath(pattern, path, target string, ignoreRepo bool) (string, boo pattern += "(/.*)?$" } - r, err := regexp.Compile(pattern) - err = errorutils.CheckError(err) + r, err := GetRegExp(pattern) if err != nil { return "", false, err } diff --git a/xray/manager.go b/xray/manager.go index d1eb6eca8..fa14c5ceb 100644 --- a/xray/manager.go +++ 
b/xray/manager.go @@ -134,6 +134,20 @@ func (sm *XrayServicesManager) GetScanGraphResults(scanID string, includeVulnera return scanService.GetScanGraphResults(scanID, includeVulnerabilities, includeLicenses, xscEnabled) } +func (sm *XrayServicesManager) ImportGraph(params services.XrayGraphImportParams) (scanId string, err error) { + enrichService := services.NewEnrichService(sm.client) + enrichService.XrayDetails = sm.config.GetServiceDetails() + return enrichService.ImportGraph(params) +} + +// GetImportGraphResults returns the Xray scan output of the requested graph import. +// The scanID input should be received from an ImportGraph request. +func (sm *XrayServicesManager) GetImportGraphResults(scanID string) (*services.ScanResponse, error) { + enrichService := services.NewEnrichService(sm.client) + enrichService.XrayDetails = sm.config.GetServiceDetails() + return enrichService.GetImportGraphResults(scanID) +} + // BuildScan scans a published build-info with Xray. // 'scanResponse' - Xray scan output of the requested build scan. // 'noFailBuildPolicy' - Indicates that the Xray API returned a "No Xray Fail build...."
error diff --git a/xray/services/enrich.go b/xray/services/enrich.go new file mode 100644 index 000000000..9c999cacb --- /dev/null +++ b/xray/services/enrich.go @@ -0,0 +1,98 @@ +package services + +import ( + "encoding/json" + "github.com/jfrog/jfrog-client-go/artifactory/services/utils" + "github.com/jfrog/jfrog-client-go/auth" + "github.com/jfrog/jfrog-client-go/http/jfroghttpclient" + "github.com/jfrog/jfrog-client-go/utils/errorutils" + "github.com/jfrog/jfrog-client-go/utils/io/httputils" + "github.com/jfrog/jfrog-client-go/utils/log" + xrayUtils "github.com/jfrog/jfrog-client-go/xray/services/utils" + "net/http" +) + +const ( + importGraph = "api/v1/scan/import" + importGraphXML = "api/v1/scan/import_xml" +) + +type EnrichService struct { + client *jfroghttpclient.JfrogHttpClient + XrayDetails auth.ServiceDetails +} + +// NewEnrichService creates a new service to enrich CycloneDX xml and jsons. +func NewEnrichService(client *jfroghttpclient.JfrogHttpClient) *EnrichService { + return &EnrichService{client: client} +} + +func (es *EnrichService) ImportGraph(importParams XrayGraphImportParams) (string, error) { + httpClientsDetails := es.XrayDetails.CreateHttpClientDetails() + var v interface{} + // There's an option to run on XML or JSON file so we need to call the correct API accordingly. 
+ err := json.Unmarshal(importParams.SBOMInput, &v) + var url string + if err != nil { + utils.SetContentType("application/xml", &httpClientsDetails.Headers) + url = es.XrayDetails.GetUrl() + importGraphXML + } else { + utils.SetContentType("application/json", &httpClientsDetails.Headers) + url = es.XrayDetails.GetUrl() + importGraph + } + + requestBody := importParams.SBOMInput + resp, body, err := es.client.SendPost(url, requestBody, &httpClientsDetails) + if err != nil { + return "", err + } + if err = errorutils.CheckResponseStatusWithBody(resp, body, http.StatusOK, http.StatusCreated); err != nil { + scanErrorJson := ScanErrorJson{} + if e := json.Unmarshal(body, &scanErrorJson); e == nil { + return "", errorutils.CheckErrorf(scanErrorJson.Error) + } + return "", err + } + scanResponse := RequestScanResponse{} + if err = json.Unmarshal(body, &scanResponse); err != nil { + return "", errorutils.CheckError(err) + } + return scanResponse.ScanId, err +} + +func (es *EnrichService) GetImportGraphResults(scanId string) (*ScanResponse, error) { + httpClientsDetails := es.XrayDetails.CreateHttpClientDetails() + utils.SetContentType("application/json", &httpClientsDetails.Headers) + + // Getting the import graph results is from the same api but with some parameters always initialized. + endPoint := es.XrayDetails.GetUrl() + scanGraphAPI + "/" + scanId + includeVulnerabilitiesParam + log.Info("Waiting for enrich process to complete on JFrog Xray...") + pollingExecutor := &httputils.PollingExecutor{ + Timeout: defaultMaxWaitMinutes, + PollingInterval: defaultSyncSleepInterval, + PollingAction: xrayUtils.PollingAction(es.client, endPoint, httpClientsDetails), + MsgPrefix: "Get Dependencies Scan results... 
", + } + body, err := pollingExecutor.Execute() + if err != nil { + return nil, err + } + scanResponse := ScanResponse{} + if err = json.Unmarshal(body, &scanResponse); err != nil { + return nil, errorutils.CheckErrorf("couldn't parse JFrog Xray server response: " + err.Error()) + } + if scanResponse.ScannedStatus == xrayScanStatusFailed { + // Failed due to an internal Xray error + return nil, errorutils.CheckErrorf("received a failure status from JFrog Xray server:\n%s", errorutils.GenerateErrorString(body)) + } + return &scanResponse, err +} + +type XrayGraphImportParams struct { + // A path in Artifactory that this Artifact is intended to be deployed to. + // This will provide a way to extract the watches that should be applied on this graph + ScanType ScanType + SBOMInput []byte + XscGitInfoContext *XscGitInfoContext + XscVersion string +} diff --git a/xray/services/scan.go b/xray/services/scan.go index e89a6fa11..177cb9b38 100644 --- a/xray/services/scan.go +++ b/xray/services/scan.go @@ -164,24 +164,11 @@ func (ss *ScanService) GetScanGraphResults(scanId string, includeVulnerabilities endPoint += includeLicensesParam } log.Info("Waiting for scan to complete on JFrog Xray...") - pollingAction := func() (shouldStop bool, responseBody []byte, err error) { - resp, body, _, err := ss.client.SendGet(endPoint, true, &httpClientsDetails) - if err != nil { - return true, nil, err - } - if err = errorutils.CheckResponseStatusWithBody(resp, body, http.StatusOK, http.StatusAccepted); err != nil { - return true, nil, err - } - // Got the full valid response. - if resp.StatusCode == http.StatusOK { - return true, body, nil - } - return false, nil, nil - } + pollingExecutor := &httputils.PollingExecutor{ Timeout: defaultMaxWaitMinutes, PollingInterval: defaultSyncSleepInterval, - PollingAction: pollingAction, + PollingAction: xrayUtils.PollingAction(ss.client, endPoint, httpClientsDetails), MsgPrefix: "Get Dependencies Scan results... 
", } diff --git a/xray/services/utils/pollingaction.go b/xray/services/utils/pollingaction.go new file mode 100644 index 000000000..3ea6b22c5 --- /dev/null +++ b/xray/services/utils/pollingaction.go @@ -0,0 +1,26 @@ +package utils + +import ( + "github.com/jfrog/jfrog-client-go/http/jfroghttpclient" + "github.com/jfrog/jfrog-client-go/utils/errorutils" + "github.com/jfrog/jfrog-client-go/utils/io/httputils" + "net/http" +) + +func PollingAction(client *jfroghttpclient.JfrogHttpClient, endPoint string, httpClientDetails httputils.HttpClientDetails) (action func() (shouldStop bool, responseBody []byte, err error)) { + pollingAction := func() (shouldStop bool, responseBody []byte, err error) { + resp, body, _, err := client.SendGet(endPoint, true, &httpClientDetails) + if err != nil { + return true, nil, err + } + if err = errorutils.CheckResponseStatusWithBody(resp, body, http.StatusOK, http.StatusAccepted); err != nil { + return true, nil, err + } + // Got the full valid response. + if resp.StatusCode == http.StatusOK { + return true, body, nil + } + return false, nil, nil + } + return pollingAction +}