	feat (protecodeExecuteScan) support custom data headers in upload request (#3836)
* support custom data headers in Protecode upload API
* parse custom data properly
* better debugging
* pre-pend META- to custom data header
* prepend to key, not value
* fix debug output
* Fix debug log level
* Make custom header meta upper case key
* fix
* Update http.go
* Update protecode.go
* Update http.go
* address PR review
* Fix merge conflicts
* Update fortifyExecuteScan.yaml
* Update protecodeExecuteScan_generated.go
* Generate step
* fix generated code

Co-authored-by: Oliver Nocon <33484802+OliverNocon@users.noreply.github.com>
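In short: the step now accepts custom data as a JSON map string and turns each entry into an upload request header, upper-casing the key and prefixing it with META- while passing the value through unchanged. Below is a minimal sketch of that mapping (the input value is invented for illustration, not taken from the repository); the diff that follows then threads the new customDataJSONMap option from the step configuration through to the Protecode client.

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

func main() {
	// Invented example value for the new customDataJSONMap parameter.
	customDataJSONMap := `{"department": "security", "team": "piper"}`

	customDataHeaders := map[string]string{}
	if err := json.Unmarshal([]byte(customDataJSONMap), &customDataHeaders); err != nil {
		fmt.Println("customDataJSONMap must be a valid JSON map:", err)
		return
	}

	headers := map[string][]string{}
	for k, v := range customDataHeaders {
		// The key is upper-cased and prefixed with META-, the value is sent as-is.
		headers["META-"+strings.ToUpper(k)] = []string{v}
	}

	fmt.Println(headers) // map[META-DEPARTMENT:[security] META-TEAM:[piper]]
}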
@@ -349,7 +349,7 @@ func uploadFile(utils protecodeUtils, config protecodeExecuteScanOptions, produc
 
 	if len(config.FetchURL) > 0 {
 		log.Entry().Debugf("Declare fetch url %v", config.FetchURL)
-		resultData := client.DeclareFetchURL(config.CleanupMode, config.Group, config.FetchURL, version, productID, replaceBinary)
+		resultData := client.DeclareFetchURL(config.CleanupMode, config.Group, config.CustomDataJSONMap, config.FetchURL, version, productID, replaceBinary)
 		productID = resultData.Result.ProductID
 	} else {
 		log.Entry().Debugf("Upload file path: %v", config.FilePath)
@@ -366,7 +366,7 @@ func uploadFile(utils protecodeUtils, config protecodeExecuteScanOptions, produc
 			combinedFileName = fmt.Sprintf("%v_%v", config.PullRequestName, fileName)
 		}
 
-		resultData := client.UploadScanFile(config.CleanupMode, config.Group, pathToFile, combinedFileName, version, productID, replaceBinary)
+		resultData := client.UploadScanFile(config.CleanupMode, config.Group, config.CustomDataJSONMap, pathToFile, combinedFileName, version, productID, replaceBinary)
 		productID = resultData.Result.ProductID
 	}
 	return productID
@@ -43,6 +43,7 @@ type protecodeExecuteScanOptions struct {
 	CustomScanVersion           string `json:"customScanVersion,omitempty"`
 	VersioningModel             string `json:"versioningModel,omitempty" validate:"possible-values=major major-minor semantic full"`
 	PullRequestName             string `json:"pullRequestName,omitempty"`
+	CustomDataJSONMap           string `json:"customDataJSONMap,omitempty"`
 }
 
 type protecodeExecuteScanInflux struct {
@@ -261,6 +262,7 @@ func addProtecodeExecuteScanFlags(cmd *cobra.Command, stepConfig *protecodeExecu
 	cmd.Flags().StringVar(&stepConfig.CustomScanVersion, "customScanVersion", os.Getenv("PIPER_customScanVersion"), "A custom version used along with the uploaded scan results.")
 	cmd.Flags().StringVar(&stepConfig.VersioningModel, "versioningModel", `major`, "The versioning model used for result reporting (based on the artifact version). Example 1.2.3 using `major` will result in version 1")
 	cmd.Flags().StringVar(&stepConfig.PullRequestName, "pullRequestName", os.Getenv("PIPER_pullRequestName"), "The name of the pull request")
+	cmd.Flags().StringVar(&stepConfig.CustomDataJSONMap, "customDataJSONMap", os.Getenv("PIPER_customDataJSONMap"), "The JSON map of key-value pairs to be included in this scan's Custom Data (See protecode API).")
 
 	cmd.MarkFlagRequired("serverUrl")
 	cmd.MarkFlagRequired("group")
@@ -540,6 +542,15 @@ func protecodeExecuteScanMetadata() config.StepData {
 						Aliases:     []config.Alias{},
 						Default:     os.Getenv("PIPER_pullRequestName"),
 					},
+					{
+						Name:        "customDataJSONMap",
+						ResourceRef: []config.ResourceReference{},
+						Scope:       []string{"GENERAL", "STEPS", "STAGES", "PARAMETERS"},
+						Type:        "string",
+						Mandatory:   false,
+						Aliases:     []config.Alias{},
+						Default:     os.Getenv("PIPER_customDataJSONMap"),
+					},
 				},
 			},
 			Outputs: config.StepOutputs{
@@ -329,19 +329,36 @@ func (pc *Protecode) LoadReport(reportFileName string, productID int) *io.ReadCl
 }
 
 // UploadScanFile upload the scan file to the protecode server
-func (pc *Protecode) UploadScanFile(cleanupMode, group, filePath, fileName, version string, productID int, replaceBinary bool) *ResultData {
+func (pc *Protecode) UploadScanFile(cleanupMode, group, customDataJSONMap, filePath, fileName, version string, productID int, replaceBinary bool) *ResultData {
 	log.Entry().Debugf("[DEBUG] ===> UploadScanFile started.....")
 
 	deleteBinary := (cleanupMode == "binary" || cleanupMode == "complete")
 
+	var headers = make(map[string][]string)
+	if len(customDataJSONMap) > 0 {
+		customDataHeaders := map[string]string{}
+		if err := json.Unmarshal([]byte(customDataJSONMap), &customDataHeaders); err != nil {
+			log.Entry().Warn("[WARN] ===> customDataJSONMap flag must be a valid JSON map. Check the value of --customDataJSONMap and try again.")
+		} else {
+			for k, v := range customDataHeaders {
+				headers["META-"+strings.ToUpper(k)] = []string{v}
+			}
+		}
+	}
+
+	headers["Group"] = []string{group}
+	headers["Delete-Binary"] = []string{fmt.Sprintf("%v", deleteBinary)}
+
 	if (replaceBinary) && (version != "") {
-		headers = map[string][]string{"Group": {group}, "Delete-Binary": {fmt.Sprintf("%v", deleteBinary)}, "Replace": {fmt.Sprintf("%v", productID)}, "Version": {version}}
+		log.Entry().Debugf("[DEBUG] ===> replaceBinary && version != empty ")
+		headers["Replace"] = []string{fmt.Sprintf("%v", productID)}
+		headers["Version"] = []string{version}
 	} else if replaceBinary {
-		headers = map[string][]string{"Group": {group}, "Delete-Binary": {fmt.Sprintf("%v", deleteBinary)}, "Replace": {fmt.Sprintf("%v", productID)}}
+		headers["Replace"] = []string{fmt.Sprintf("%v", productID)}
+		log.Entry().Debugf("[DEBUG] ===> replaceBinary")
 	} else if version != "" {
-		headers = map[string][]string{"Group": {group}, "Delete-Binary": {fmt.Sprintf("%v", deleteBinary)}, "Version": {version}}
-	} else {
-		headers = map[string][]string{"Group": {group}, "Delete-Binary": {fmt.Sprintf("%v", deleteBinary)}}
+		log.Entry().Debugf("[DEBUG] ===> version != empty ")
+		headers["Version"] = []string{version}
 	}
 
 	uploadURL := fmt.Sprintf("%v/api/upload/%v", pc.serverURL, fileName)
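Aside from the new custom-data block, the reworked branching above only decides whether Replace and/or Version are added on top of the shared Group and Delete-Binary headers. A compact sketch of that rule, with a hypothetical helper name that is not part of this change:

package sketch

import "fmt"

// extraUploadHeaders mirrors the four branches above: Replace is set when an
// existing product is to be replaced, Version when a (custom) scan version is given.
func extraUploadHeaders(replaceBinary bool, version string, productID int) map[string][]string {
	extra := map[string][]string{}
	if replaceBinary {
		extra["Replace"] = []string{fmt.Sprintf("%v", productID)}
	}
	if version != "" {
		extra["Version"] = []string{version}
	}
	return extra
}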
@@ -371,19 +388,35 @@ func (pc *Protecode) UploadScanFile(cleanupMode, group, filePath, fileName, vers
 }
 
 // DeclareFetchURL configures the fetch url for the protecode scan
-func (pc *Protecode) DeclareFetchURL(cleanupMode, group, fetchURL, version string, productID int, replaceBinary bool) *ResultData {
+func (pc *Protecode) DeclareFetchURL(cleanupMode, group, customDataJSONMap, fetchURL, version string, productID int, replaceBinary bool) *ResultData {
 	deleteBinary := (cleanupMode == "binary" || cleanupMode == "complete")
 
+	var headers = make(map[string][]string)
+	if len(customDataJSONMap) > 0 {
+		customDataHeaders := map[string]string{}
+		if err := json.Unmarshal([]byte(customDataJSONMap), &customDataHeaders); err != nil {
+			log.Entry().Warn("[WARN] ===> customDataJSONMap flag must be a valid JSON map. Check the value of --customDataJSONMap and try again.")
+		} else {
+			for k, v := range customDataHeaders {
+				headers["META-"+strings.ToUpper(k)] = []string{v}
+			}
+		}
+	}
+
+	headers["Group"] = []string{group}
+	headers["Delete-Binary"] = []string{fmt.Sprintf("%v", deleteBinary)}
+	headers["Url"] = []string{fetchURL}
+	headers["Content-Type"] = []string{"application/json"}
 	if (replaceBinary) && (version != "") {
-		headers = map[string][]string{"Group": {group}, "Delete-Binary": {fmt.Sprintf("%v", deleteBinary)}, "Replace": {fmt.Sprintf("%v", productID)}, "Version": {version}, "Url": {fetchURL}, "Content-Type": {"application/json"}}
+		log.Entry().Debugf("[DEBUG][FETCH_URL] ===> replaceBinary && version != empty ")
+		headers["Replace"] = []string{fmt.Sprintf("%v", productID)}
+		headers["Version"] = []string{version}
 	} else if replaceBinary {
-		headers = map[string][]string{"Group": {group}, "Delete-Binary": {fmt.Sprintf("%v", deleteBinary)}, "Replace": {fmt.Sprintf("%v", productID)}, "Url": {fetchURL}, "Content-Type": {"application/json"}}
+		log.Entry().Debugf("[DEBUG][FETCH_URL] ===> replaceBinary")
+		headers["Replace"] = []string{fmt.Sprintf("%v", productID)}
 	} else if version != "" {
-		headers = map[string][]string{"Group": {group}, "Delete-Binary": {fmt.Sprintf("%v", deleteBinary)}, "Version": {version}, "Url": {fetchURL}, "Content-Type": {"application/json"}}
-	} else {
-		headers = map[string][]string{"Group": {group}, "Delete-Binary": {fmt.Sprintf("%v", deleteBinary)}, "Url": {fetchURL}, "Content-Type": {"application/json"}}
+		log.Entry().Debugf("[DEBUG][FETCH_URL] ===> version != empty ")
+		headers["Version"] = []string{version}
 	}
 
 	protecodeURL := fmt.Sprintf("%v/api/fetch/", pc.serverURL)
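The fetch variant builds the same custom-data headers and additionally always sends Url and Content-Type. For illustration, with an invented configuration (cleanupMode "binary", group "42", customDataJSONMap {"stage": "release"}, version "1.2.3", replaceBinary disabled), the request to /api/fetch/ would carry roughly this header set:

package sketch

// fetchHeaders is an illustration only; all values below are invented.
var fetchHeaders = map[string][]string{
	"META-STAGE":    {"release"},                          // from customDataJSONMap
	"Group":         {"42"},
	"Delete-Binary": {"true"},                             // cleanupMode "binary" or "complete"
	"Url":           {"https://example.org/artifact.tar"}, // the configured fetchURL
	"Content-Type":  {"application/json"},
	"Version":       {"1.2.3"},                            // no Replace header, since replaceBinary is false
}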
@@ -312,26 +312,27 @@ func TestDeclareFetchURLSuccess(t *testing.T) {
 	pc := makeProtecode(Options{ServerURL: server.URL})
 
 	cases := []struct {
-		cleanupMode    string
-		protecodeGroup string
-		fetchURL       string
-		version        string
-		productID      int
-		replaceBinary  bool
-		want           int
+		cleanupMode       string
+		protecodeGroup    string
+		customDataJSONMap string
+		fetchURL          string
+		version           string
+		productID         int
+		replaceBinary     bool
+		want              int
 	}{
-		{"binary", "group1", "/api/fetch/", "", 1, true, 111},
-		{"binary", "group1", "/api/fetch/", "custom-test-version", -1, true, 111},
-		{"binary", "group1", "/api/fetch/", "1.2.3", 0, true, 111},
+		{"binary", "group1", `{"custom-header": "custom-value"}`, "/api/fetch/", "", 1, true, 111},
+		{"binary", "group1", "", "/api/fetch/", "custom-test-version", -1, true, 111},
+		{"binary", "group1", "", "/api/fetch/", "1.2.3", 0, true, 111},
 
-		{"binary", "group1", "/api/fetch/", "", 1, false, 111},
-		{"binary", "group1", "/api/fetch/", "custom-test-version", -1, false, 111},
-		{"binary", "group1", "/api/fetch/", "1.2.3", 0, false, 111},
+		{"binary", "group1", "", "/api/fetch/", "", 1, false, 111},
+		{"binary", "group1", "", "/api/fetch/", "custom-test-version", -1, false, 111},
+		{"binary", "group1", "", "/api/fetch/", "1.2.3", 0, false, 111},
 	}
 	for _, c := range cases {
 
 		// pc.DeclareFetchURL(c.cleanupMode, c.protecodeGroup, c.fetchURL)
-		got := pc.DeclareFetchURL(c.cleanupMode, c.protecodeGroup, c.fetchURL, c.version, c.productID, c.replaceBinary)
+		got := pc.DeclareFetchURL(c.cleanupMode, c.protecodeGroup, c.customDataJSONMap, c.fetchURL, c.version, c.productID, c.replaceBinary)
 
 		assert.Equal(t, requestURI, "/api/fetch/")
 		assert.Equal(t, got.Result.ProductID, c.want)
@@ -421,28 +422,29 @@ func TestUploadScanFileSuccess(t *testing.T) {
 	}
 
 	cases := []struct {
-		cleanupMode    string
-		protecodeGroup string
-		filePath       string
-		version        string
-		productID      int
-		replaceBinary  bool
-		want           int
+		cleanupMode       string
+		protecodeGroup    string
+		customDataJSONMap string
+		filePath          string
+		version           string
+		productID         int
+		replaceBinary     bool
+		want              int
 	}{
-		{"binary", "group1", testFile.Name(), "", 1, true, 1},
-		{"binary", "group1", testFile.Name(), "custom-test-version", 0, true, 0},
-		{"binary", "group1", testFile.Name(), "1.2.3", -1, true, -1},
+		{"binary", "group1", `{"custom-header": "custom-value"}`, testFile.Name(), "", 1, true, 1},
+		{"binary", "group1", "", testFile.Name(), "custom-test-version", 0, true, 0},
+		{"binary", "group1", "", testFile.Name(), "1.2.3", -1, true, -1},
 
-		{"binary", "group1", testFile.Name(), "", 1, false, 112},
-		{"binary", "group1", testFile.Name(), "custom-test-version", 0, false, 112},
-		{"binary", "group1", testFile.Name(), "1.2.3", -1, false, 112},
+		{"binary", "group1", "", testFile.Name(), "", 1, false, 112},
+		{"binary", "group1", "", testFile.Name(), "custom-test-version", 0, false, 112},
+		{"binary", "group1", "", testFile.Name(), "1.2.3", -1, false, 112},
 
 		// {"binary", "group1", testFile.Name(), "/api/upload/dummy"},
 		// {"Test", "group2", testFile.Name(), "/api/upload/dummy"},
 	}
 	for _, c := range cases {
 
-		got := pc.UploadScanFile(c.cleanupMode, c.protecodeGroup, c.filePath, "dummy.tar", c.version, c.productID, c.replaceBinary)
+		got := pc.UploadScanFile(c.cleanupMode, c.protecodeGroup, c.customDataJSONMap, c.filePath, "dummy.tar", c.version, c.productID, c.replaceBinary)
 
 		assert.Equal(t, requestURI, "/api/upload/dummy.tar")
 		assert.Contains(t, passedHeaders, "Group")
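The updated tests only assert the standard headers; one could additionally check that the custom data header actually reaches the test server. Below is a sketch of such an assertion, meant to sit inside the loop above next to the existing checks, assuming passedHeaders is the header map captured by the test handler; note that net/http canonicalizes header keys, so the key may arrive as Meta-Custom-Header rather than META-CUSTOM-HEADER.

// Sketch only (not part of this commit): verify the custom data header was forwarded
// for the case that sets customDataJSONMap to {"custom-header": "custom-value"}.
if c.customDataJSONMap != "" {
	_, upper := passedHeaders["META-CUSTOM-HEADER"]
	_, canonical := passedHeaders["Meta-Custom-Header"]
	assert.True(t, upper || canonical, "expected the custom data header to be forwarded")
}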
@@ -263,6 +263,15 @@ spec:
           - PARAMETERS
           - STAGES
           - STEPS
+      - name: customDataJSONMap
+        type: string
+        description:
+          "The JSON map of key-value pairs to be included in this scan's Custom Data (See protecode API)."
+        scope:
+          - GENERAL
+          - STEPS
+          - STAGES
+          - PARAMETERS
   outputs:
     resources:
       - name: influx