diff --git a/.tekton/file-integrity-operator-bundle-1-3-pull-request.yaml b/.tekton/file-integrity-operator-bundle-1-3-pull-request.yaml
index 6e3d1b813..b82dba231 100644
--- a/.tekton/file-integrity-operator-bundle-1-3-pull-request.yaml
+++ b/.tekton/file-integrity-operator-bundle-1-3-pull-request.yaml
@@ -28,8 +28,11 @@ spec:
       value: 5d
     - name: dockerfile
       value: bundle.openshift.Dockerfile
+    - name: build-args
+      value:
+        - 'BUILD_MODE=on-pr'
     - name: hermetic
-      value: "true"
+      value: "false"
     - name: prefetch-input
       value: '[{"type": "gomod", "path": "."}]'
   pipelineSpec:
diff --git a/.tekton/file-integrity-operator-bundle-pull-request.yaml b/.tekton/file-integrity-operator-bundle-pull-request.yaml
index d87eb9185..10a46869a 100644
--- a/.tekton/file-integrity-operator-bundle-pull-request.yaml
+++ b/.tekton/file-integrity-operator-bundle-pull-request.yaml
@@ -29,7 +29,7 @@ spec:
     - name: dockerfile
       value: bundle.openshift.Dockerfile
     - name: hermetic
-      value: "true"
+      value: "false"
     - name: prefetch-input
       value: '[{"type": "gomod", "path": "."}]'
   pipelineSpec:
diff --git a/.tekton/file-integrity-operator-master-pull-request.yaml b/.tekton/file-integrity-operator-master-pull-request.yaml
index cac2bb9ad..be5e7c6cc 100644
--- a/.tekton/file-integrity-operator-master-pull-request.yaml
+++ b/.tekton/file-integrity-operator-master-pull-request.yaml
@@ -28,6 +28,9 @@ spec:
       value: 5d
     - name: dockerfile
       value: build/Dockerfile.openshift
+    - name: build-args
+      value:
+        - 'BUILD_MODE=on-pr'
     - name: hermetic
       value: "true"
     - name: prefetch-input
diff --git a/bundle-hack/update_csv.go b/bundle-hack/update_csv.go
index 28a2cdb05..530d41cf8 100644
--- a/bundle-hack/update_csv.go
+++ b/bundle-hack/update_csv.go
@@ -1,48 +1,65 @@
 package main
 
 import (
-	"encoding/base64"
-	"fmt"
-	"gopkg.in/yaml.v3"
-	"log"
-	"os"
-	"path/filepath"
-	"strings"
+	"encoding/base64"
+	"encoding/json"
+	"fmt"
+	"io"
+	"log"
+	"net/http"
+	"os"
+	"os/exec"
+	"path/filepath"
+	"strings"
+	"time"
+
+	"gopkg.in/yaml.v3"
 )
 
+// QuayTagResponse defines the structure for parsing the JSON response from the Quay API.
+type QuayTagResponse struct {
+	Tags []struct {
+		ManifestDigest string `json:"manifest_digest"`
+	} `json:"tags"`
+}
+
+// readCSV reads and unmarshals a YAML file into a map.
 func readCSV(csvFilename string, csv *map[string]interface{}) {
 	yamlFile, err := os.ReadFile(csvFilename)
 	if err != nil {
-		log.Fatal(fmt.Sprintf("Error: Failed to read file '%s'", csvFilename))
+		log.Fatalf("Error: Failed to read file '%s': %v", csvFilename, err)
 	}
 
 	err = yaml.Unmarshal(yamlFile, csv)
 	if err != nil {
-		log.Fatal(fmt.Sprintf("Error: Failed to unmarshal yaml file '%s'", csvFilename))
+		log.Fatalf("Error: Failed to unmarshal yaml file '%s': %v", csvFilename, err)
 	}
 }
 
+// replaceCSV writes a map to a new YAML file.
 func replaceCSV(csvFilename string, outputCSVFilename string, csv map[string]interface{}) {
 	err := os.Remove(csvFilename)
 	if err != nil {
-		log.Fatal(fmt.Sprintf("Error: Failed to remofe file '%s'", csvFilename))
+		log.Fatalf("Error: Failed to remove file '%s': %v", csvFilename, err)
 	}
 
 	f, err := os.Create(outputCSVFilename)
 	if err != nil {
-		log.Fatal(fmt.Sprintf("Error: Failed to create file '%s'", outputCSVFilename))
+		log.Fatalf("Error: Failed to create file '%s': %v", outputCSVFilename, err)
 	}
+	defer f.Close()
 
 	enc := yaml.NewEncoder(f)
-	defer enc.Close()
 	enc.SetIndent(2)
+	defer enc.Close()
 
 	err = enc.Encode(csv)
 	if err != nil {
-		log.Fatal("Error: Failed encode the CSV into yaml")
+		log.Fatalf("Error: Failed to encode the CSV into yaml: %v", err)
 	}
 }
 
+// getInputCSVFilePath finds the ClusterServiceVersion YAML file in a directory.
 func getInputCSVFilePath(dir string) string {
 	filenames, err := os.ReadDir(dir)
 	if err != nil {
@@ -51,7 +68,7 @@ func getInputCSVFilePath(dir string) string {
 
 	for _, filename := range filenames {
 		if strings.HasSuffix(filename.Name(), "clusterserviceversion.yaml") {
-			return filepath.Join(dir,filename.Name())
+			return filepath.Join(dir, filename.Name())
 		}
 	}
 
@@ -59,19 +76,21 @@ func getInputCSVFilePath(dir string) string {
 	return ""
 }
 
+// getOutputCSVFilePath constructs the output file path for the new CSV.
 func getOutputCSVFilePath(dir string, version string) string {
 	return filepath.Join(dir, fmt.Sprintf("file-integrity-operator.v%s.clusterserviceversion.yaml", version))
 }
 
-func addRequiredAnnotations(csv map[string]interface{}){
+// addRequiredAnnotations adds a set of required annotations to the CSV.
+func addRequiredAnnotations(csv map[string]interface{}) {
 	requiredAnnotations := map[string]string{
-		"features.operators.openshift.io/disconnected": "true",
-		"features.operators.openshift.io/fips-compliant": "true",
-		"features.operators.openshift.io/proxy-aware": "false",
-		"features.operators.openshift.io/tls-profiles": "false",
-		"features.operators.openshift.io/token-auth-aws": "false",
+		"features.operators.openshift.io/disconnected":     "true",
+		"features.operators.openshift.io/fips-compliant":   "true",
+		"features.operators.openshift.io/proxy-aware":      "false",
+		"features.operators.openshift.io/tls-profiles":     "false",
+		"features.operators.openshift.io/token-auth-aws":   "false",
 		"features.operators.openshift.io/token-auth-azure": "false",
-		"features.operators.openshift.io/token-auth-gcp": "false",
+		"features.operators.openshift.io/token-auth-gcp":   "false",
 	}
 
 	annotations, ok := csv["metadata"].(map[string]interface{})["annotations"].(map[string]interface{})
@@ -85,14 +104,18 @@ func addRequiredAnnotations(csv map[string]interface{}){
 	fmt.Println("Added required annotations")
 }
 
+// replaceVersion updates version strings within the CSV.
 func replaceVersion(oldVersion, newVersion string, csv map[string]interface{}) {
 	spec, ok := csv["spec"].(map[string]interface{})
-	metadata, ok := csv["metadata"].(map[string]interface{})
 	if !ok {
 		log.Fatal("Error: 'spec' does not exist in the CSV content")
 	}
+	metadata, ok := csv["metadata"].(map[string]interface{})
+	if !ok {
+		log.Fatal("Error: 'metadata' does not exist in the CSV content")
+	}
 
-	fmt.Println(fmt.Sprintf("Updating version references from %s to %s", oldVersion, newVersion))
+	fmt.Printf("Updating version references from %s to %s\n", oldVersion, newVersion)
 
 	spec["version"] = newVersion
 	spec["replaces"] = "file-integrity-operator.v" + oldVersion
@@ -100,77 +123,179 @@ func replaceVersion(oldVersion, newVersion string, csv map[string]interface{}) {
 	metadata["name"] = strings.Replace(metadata["name"].(string), oldVersion, newVersion, 1)
 
 	annotations := metadata["annotations"].(map[string]interface{})
-	annotations["olm.skipRange"] = strings.Replace(annotations["olm.skipRange"].(string), oldVersion, newVersion, 1)
+	annotations["olm.skipRange"] = fmt.Sprintf(">=%s", newVersion)
 
-	fmt.Println(fmt.Sprintf("Updated version references from %s to %s", oldVersion, newVersion))
+	fmt.Printf("Updated version references from %s to %s\n", oldVersion, newVersion)
 }
 
+// replaceIcon updates the operator icon in the CSV.
 func replaceIcon(csv map[string]interface{}) {
-
-	s, ok := csv["spec"]
+	spec, ok := csv["spec"].(map[string]interface{})
 	if !ok {
 		log.Fatal("Error: 'spec' does not exist in the CSV content")
 	}
-	spec := s.(map[string]interface{})
 
 	iconPath := "../bundle/icons/icon.png"
-	iconData,err := os.ReadFile(iconPath)
+	iconData, err := os.ReadFile(iconPath)
 	if err != nil {
-		log.Fatal(fmt.Sprintf("Error: Failed to read icon file '%s'", iconPath))
+		log.Fatalf("Error: Failed to read icon file '%s': %v", iconPath, err)
 	}
+
 	icon := make(map[string]string)
 	icon["base64data"] = base64.StdEncoding.EncodeToString(iconData)
-	icon["media"] = "image/png"
+	icon["mediatype"] = "image/png"
 
 	var icons = make([]map[string]string, 1)
 	icons[0] = icon
 	spec["icon"] = icons
 
-	fmt.Println(fmt.Sprintf("Updated the operator image to use icon in %s", iconPath))
+	fmt.Printf("Updated the operator image to use icon in %s\n", iconPath)
 }
 
+// recoverFromReplaceImages handles panics during image replacement.
 func recoverFromReplaceImages() {
 	if r := recover(); r != nil {
-		log.Fatal("Error: It was not possible to replace RELATED_IMAGE_OPERATOR")
+		log.Fatalf("Error: It was not possible to replace RELATED_IMAGE_OPERATOR: %v", r)
 	}
 }
 
-func replaceImages(csv map[string]interface{}) {
-	defer recoverFromReplaceImages()
+// getLatestGitCommitSha retrieves the latest git commit SHA.
+func getLatestGitCommitSha() string {
+	cmd := exec.Command("git", "rev-parse", "HEAD")
+	out, err := cmd.Output()
+	if err != nil {
+		log.Fatalf("Error getting latest git commit SHA: %v", err)
+	}
+	return strings.TrimSpace(string(out))
+}
+
+// getDigestFromQuay fetches the manifest digest for a specific tag from Quay.io,
+// retrying for up to 30 minutes.
+func getDigestFromQuay(tag string) string {
+	const timeout = 30 * time.Minute
+	const retryInterval = 1 * time.Minute
+
+	startTime := time.Now()
+	fmt.Printf("Attempting to find manifest digest for tag '%s'. Will retry for up to %v.\n", tag, timeout)
+
+	for {
+		// Check for timeout at the beginning of each iteration
+		if time.Since(startTime) >= timeout {
+			log.Fatalf("Timeout: Failed to find manifest digest for tag '%s' after %v.", tag, timeout)
+		}
 
-	// Konflux will automatically update the image sha based on the most
-	// recent builds. We want to peel off the SHA and append it to the Red
-	// Hat registry so that the bundle image will work when it's available
-	// there.
-	konfluxPullSpec := "quay.io/redhat-user-workloads/ocp-isc-tenant/file-integrity-operator@sha256:69670664d82a5cacc2f3d0c0c0066fcdcf93de74b4ddf176f7458df274d69a42"
-	delimiter := "@"
-	parts := strings.Split(konfluxPullSpec, delimiter)
-	if len(parts) > 2 {
-		log.Fatalf("Error: Failed to safely determine image SHA from Konflux pull spec: %s", konfluxPullSpec)
+		var digestFound string = ""
+
+		// Create and send the request
+		url := fmt.Sprintf("https://quay.io/api/v1/repository/redhat-user-workloads/ocp-isc-tenant/file-integrity-operator/tag/?specificTag=%s", tag)
+		resp, err := http.Get(url)
+
+		if err != nil {
+			log.Printf("Warning: Error fetching from Quay.io: %v.", err)
+		} else {
+			if resp.StatusCode == http.StatusOK {
+				body, readErr := io.ReadAll(resp.Body)
+				if readErr != nil {
+					log.Printf("Warning: Error reading response body: %v.", readErr)
+				} else {
+					var quayResponse QuayTagResponse
+					if jsonErr := json.Unmarshal(body, &quayResponse); jsonErr != nil {
+						log.Printf("Warning: Error unmarshaling JSON: %v.", jsonErr)
+					} else if len(quayResponse.Tags) > 0 && quayResponse.Tags[0].ManifestDigest != "" {
+						digest := quayResponse.Tags[0].ManifestDigest
+						fmt.Printf("Success: Found manifest digest '%s' after %v.\n", digest, time.Since(startTime).Round(time.Second))
+						digestFound = digest // Store the digest to return later
+					}
+				}
+			} else {
+				bodyBytes, _ := io.ReadAll(resp.Body)
+				log.Printf("Warning: Received non-200 status from Quay.io: %s. Body: %s.", resp.Status, string(bodyBytes))
+			}
+			// IMPORTANT: Close the body inside the loop to prevent resource leaks
+			resp.Body.Close()
+		}
+
+		// If we found the digest, exit the loop and return it.
+		if digestFound != "" {
+			return digestFound
+		}
+
+		// Wait before the next retry
+		log.Printf("Manifest digest not yet found. Retrying in %v...", retryInterval)
+		time.Sleep(retryInterval)
 	}
-	imageSha := parts[1]
+}
+
+// replaceImages updates the operator and related images in the CSV.
+func replaceImages(csv map[string]interface{}, imageSha string) {
 	registry := "registry.redhat.io/compliance/openshift-file-integrity-rhel8-operator"
-	redHatPullSpec := registry + delimiter + imageSha
+	redHatPullSpec := registry + "@" + imageSha
+
+	installSpec, ok := csv["spec"].(map[string]interface{})["install"].(map[string]interface{})["spec"].(map[string]interface{})
+	if !ok {
+		log.Fatal("Error: 'spec.install.spec' does not exist in the CSV content")
+	}
+
+	deployments, ok := installSpec["deployments"].([]interface{})
+	if !ok || len(deployments) == 0 {
+		log.Fatal("Error: 'deployments' not found in the CSV content")
+	}
+	deployment, ok := deployments[0].(map[string]interface{})
+	if !ok {
+		log.Fatal("Error: Could not process deployment in the CSV content")
+	}
 
-	env, ok := csv["spec"].(map[string]interface{})["install"].(map[string]interface{})["spec"].(map[string]interface{})["deployments"].([]interface{})[0].(map[string]interface{})["spec"].(map[string]interface{})["template"].(map[string]interface{})["spec"].(map[string]interface{})["containers"].([]interface{})[0].(map[string]interface{})["env"].([]interface{})
+	podSpec, ok := deployment["spec"].(map[string]interface{})["template"].(map[string]interface{})["spec"].(map[string]interface{})
 	if !ok {
-		log.Fatal("Error: 'env' with RELATED_IMAGE_OPERATOR does not exist in the CSV content")
+		log.Fatal("Error: 'pod spec' not found in the CSV content")
 	}
 
+	containers, ok := podSpec["containers"].([]interface{})
+	if !ok || len(containers) == 0 {
+		log.Fatal("Error: 'containers' not found in the CSV content")
+	}
+	container, ok := containers[0].(map[string]interface{})
+	if !ok {
+		log.Fatal("Error: Could not process container in the CSV content")
+	}
+
+	// Update container image
+	container["image"] = redHatPullSpec
+
+	// Update RELATED_IMAGE_OPERATOR environment variable
+	env, ok := container["env"].([]interface{})
+	if !ok {
+		log.Println("Warning: 'env' for RELATED_IMAGE_OPERATOR not found, creating it.")
+		env = []interface{}{}
+	}
+
+	found := false
 	for _, item := range env {
-		variable := item.(map[string]interface{})
-		if variable["name"] == "RELATED_IMAGE_OPERATOR" {
+		variable, ok := item.(map[string]interface{})
+		if !ok {
+			continue
+		}
+		if name, ok := variable["name"].(string); ok && name == "RELATED_IMAGE_OPERATOR" {
 			variable["value"] = redHatPullSpec
+			found = true
+			break
 		}
 	}
 
-	containersMap := csv["spec"].(map[string]interface{})["install"].(map[string]interface{})["spec"].(map[string]interface{})["deployments"].([]interface{})[0].(map[string]interface{})["spec"].(map[string]interface{})["template"].(map[string]interface{})["spec"].(map[string]interface{})["containers"].([]interface{})[0].(map[string]interface{})
-	containersMap["image"] = redHatPullSpec
+	if !found {
+		env = append(env, map[string]interface{}{
+			"name":  "RELATED_IMAGE_OPERATOR",
+			"value": redHatPullSpec,
+		})
+		container["env"] = env
+	}
 
 	fmt.Println("Updated the deployment manifest to use downstream builds")
 }
 
+// removeRelated removes the 'relatedImages' section from the CSV.
 func removeRelated(csv map[string]interface{}) {
 	spec, ok := csv["spec"].(map[string]interface{})
 	if !ok {
@@ -182,24 +307,37 @@ func removeRelated(csv map[string]interface{}) {
 }
 
 func main() {
 
-	var csv map[string]interface{}
+	if len(os.Args) < 4 {
+		log.Fatalf("Usage: %s <manifestsDir> <oldVersion> <newVersion> [on-pr]", os.Args[0])
+	}
 
-	manifestsDir := os.Args[1]
-	oldVersion := os.Args[2]
-	newVersion := os.Args[3]
+	manifestsDir, oldVersion, newVersion := os.Args[1], os.Args[2], os.Args[3]
+	onPR := len(os.Args) > 4 && os.Args[4] == "on-pr"
+
+	var imageSha string
+	if onPR {
+		defer recoverFromReplaceImages()
+		sha := getLatestGitCommitSha()
+		fmt.Printf("Using latest git commit SHA: %s\n", sha)
+		imageSha = getDigestFromQuay("on-pr-" + sha)
+	} else {
+		imageSha = "sha256:69670664d82a5cacc2f3d0c0c0066fcdcf93de74b4ddf176f7458df274d69a42"
+		fmt.Printf("Using default downstream digest: %s\n", imageSha)
+	}
 
 	csvFilename := getInputCSVFilePath(manifestsDir)
-	fmt.Println(fmt.Sprintf("Found manifest in %s", csvFilename))
+	fmt.Printf("Found manifest %s\n", csvFilename)
 
+	var csv map[string]interface{}
 	readCSV(csvFilename, &csv)
 	addRequiredAnnotations(csv)
 	replaceVersion(oldVersion, newVersion, csv)
 	replaceIcon(csv)
-	replaceImages(csv)
+	replaceImages(csv, imageSha)
 	removeRelated(csv)
 
 	outputCSVFilename := getOutputCSVFilePath(manifestsDir, newVersion)
 	replaceCSV(csvFilename, outputCSVFilename, csv)
-	fmt.Println(fmt.Sprintf("Replaced CSV manifest for %s", newVersion))
+	fmt.Printf("Replaced CSV manifest for %s\n", newVersion)
 }
diff --git a/bundle.openshift.Dockerfile b/bundle.openshift.Dockerfile
index f0c614cd2..9daf36a6e 100644
--- a/bundle.openshift.Dockerfile
+++ b/bundle.openshift.Dockerfile
@@ -5,8 +5,10 @@ WORKDIR bundle-hack
 
 ARG FIO_OLD_VERSION="1.3.5"
 ARG FIO_NEW_VERSION="1.3.6"
+ARG BUILD_MODE=""
+ENV BUILD_MODE=${BUILD_MODE}
 
-RUN go run ./update_csv.go ../bundle/manifests ${FIO_OLD_VERSION} ${FIO_NEW_VERSION}
+RUN go run ./update_csv.go ../bundle/manifests ${FIO_OLD_VERSION} ${FIO_NEW_VERSION} ${BUILD_MODE}
 RUN ./update_bundle_annotations.sh
 
 FROM scratch