FIPS reporting Go code
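
// GetDataFromFBC loads the file-based catalog (FBC) that was extracted for the
// report's index image under ./output/<tag>/configs, converts it to the
// internal declarative-config model, and fans the packages out to a small pool
// of workers whose audit bundles are collected into the returned report.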
func GetDataFromFBC(report index.Data) (index.Data, error) {
	root := "./output/" + actions.GetVersionTagFromImage(report.Flags.IndexImage) + "/configs"
	fileSystem := os.DirFS(root)

	fbc, err := declcfg.LoadFS(fileSystem)
	if err != nil {
		return report, fmt.Errorf("unable to load the file-based config: %s", err)
	}
	model, err := declcfg.ConvertToModel(*fbc)
	if err != nil {
		return report, fmt.Errorf("unable to convert the file-based config to the internal model: %s", err)
	}

	const maxConcurrency = 4
	packageChan := make(chan *alphamodel.Package, maxConcurrency)
	resultsChan := make(chan *index.Data, maxConcurrency)
	var wg sync.WaitGroup

	// Start worker goroutines
	for i := 0; i < maxConcurrency; i++ {
		wg.Add(1)
		go packageWorker(packageChan, resultsChan, report, &wg)
	}

	// Send packages to the workers
	go func() {
		for _, Package := range model {
			packageChan <- Package
		}
		close(packageChan)
	}()

	// Close the results channel when all workers are done
	go func() {
		wg.Wait()
		close(resultsChan)
	}()

	// Collect results
	for result := range resultsChan {
		report.AuditBundle = append(report.AuditBundle, result.AuditBundle...)
	}

	return report, nil
}
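
// packageWorker consumes packages from packageChan, builds an AuditBundle for
// every bundle in every channel of the package (or only for channel heads when
// flags.HeadOnly is set), and sends one index.Data result per package on
// resultsChan.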
func packageWorker(packageChan <-chan *alphamodel.Package, resultsChan chan<- *index.Data, report index.Data, wg *sync.WaitGroup) {
	defer wg.Done()

	for Package := range packageChan {
		// Local variable to accumulate the results for this package
		var result index.Data

		// Iterate over the channels in the package
		for _, channel := range Package.Channels {
			headBundle, err := channel.Head()
			if err != nil {
				continue
			}

			for _, bundle := range channel.Bundles {
				auditBundle := models.NewAuditBundle(bundle.Name, bundle.Image)
				if headBundle == bundle {
					auditBundle.IsHeadOfChannel = true
				} else if flags.HeadOnly {
					continue
				}

				log.Infof("Generating data from the bundle (%s)", bundle.Name)

				var csv *v1alpha1.ClusterServiceVersion
				err := json.Unmarshal([]byte(bundle.CsvJSON), &csv)
				if err == nil {
					auditBundle.CSVFromIndexDB = csv
				} else {
					auditBundle.Errors = append(auditBundle.Errors,
						fmt.Errorf("unable to parse the csv from the index.db: %s", err).Error())
				}

				// Gather data from the bundle image (scorecard, validators, labels, ...)
				auditBundle = actions.GetDataFromBundleImage(auditBundle, flags.DisableScorecard,
					flags.DisableValidators, flags.ServerMode, flags.Label,
					flags.LabelValue, flags.ContainerEngine, flags.IndexImage)

				// Record every channel of the package on the audit bundle
				for _, channel := range Package.Channels {
					auditBundle.Channels = append(auditBundle.Channels, channel.Name)
				}
				auditBundle.PackageName = Package.Name
				auditBundle.DefaultChannel = Package.DefaultChannel.Name

				// Collect properties not found in the index version
				for _, property := range bundle.Properties {
					auditBundle.PropertiesDB = append(auditBundle.PropertiesDB,
						pkg.PropertiesAnnotation{Type: property.Type, Value: string(property.Value)})
				}

				headBundle, err := channel.Head()
				if err == nil {
					if headBundle == bundle {
						auditBundle.IsHeadOfChannel = true
					}
				}

				if flags.StaticCheckFIPSCompliance {
					err = handleFIPS(auditBundle.OperatorBundleImagePath, csv, report)
					if err != nil {
						// Check for specific error types and provide more informative messages
						if exitError, ok := err.(*exec.ExitError); ok {
							if exitError.ExitCode() == 127 {
								auditBundle.Errors = append(auditBundle.Errors,
									"Failed to run FIPS external validator: Command not found.")
							} else {
								auditBundle.Errors = append(auditBundle.Errors,
									fmt.Sprintf("FIPS external validator returned with exit code %d.", exitError.ExitCode()))
							}
						} else {
							auditBundle.Errors = append(auditBundle.Errors,
								fmt.Sprintf("Difficulty running FIPS external validator: %s", err.Error()))
						}
					}
				}

				result.AuditBundle = append(result.AuditBundle, *auditBundle)
			}
		}

		// Send the result for this package to the results channel
		resultsChan <- &result
	}
}
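
// handleFIPS performs the static FIPS compliance check: when the CSV claims
// FIPS compliance, every unique image referenced by the bundle is run through
// the external validator and the results are folded into the report data.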
func handleFIPS(operatorBundlePath string, csv *v1alpha1.ClusterServiceVersion, reportData index.Data) error {
	isClaimingFIPSCompliant, err := CheckFIPSAnnotations(csv)
	if err != nil {
		return err
	}
	if !isClaimingFIPSCompliant {
		return nil
	}

	uniqueImageRefs, err := ExtractUniqueImageReferences(operatorBundlePath, csv)
	if err != nil {
		return err
	}

	for _, imageRef := range uniqueImageRefs {
		success, warnings, errors, err := ExecuteExternalValidator(imageRef)
		if err != nil {
			log.Errorf("Error while executing FIPS compliance check on image: %s. Error: %s",
				imageRef, err.Error())
			return err
		}
		ProcessValidatorResults(success, warnings, errors, &reportData)
	}
	return nil
}

// ProcessValidatorResults takes the results from the external validator and appends them to the report data.
func ProcessValidatorResults(success bool, warnings, errors []string, report *index.Data) {
	// Create a slice to hold combined errors and warnings
	combinedErrors := make([]string, 0)

	// If the external validator fails, append the errors
	if !success {
		combinedErrors = append(combinedErrors, errors...)
	}

	// Prepend warnings with "WARNING:" and append to combinedErrors
	for _, warning := range warnings {
		combinedErrors = append(combinedErrors, "WARNING: "+warning)
	}

	// Assuming there's a mechanism to identify which bundle is being processed
	// Here, I'm just using the last bundle in the report as an example
	if len(report.AuditBundle) > 0 {
		report.AuditBundle[len(report.AuditBundle)-1].Errors = combinedErrors
	}
}
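
// PrepareReport turns the collected audit bundles into report columns,
// skipping bundles without the requested label, sorts them by package name,
// and stamps the report with the flags, index image inspection data and date.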
func (d *Data) PrepareReport() Report {
	d.fixPackageNameInconsistency()

	var allColumns []Column
	for _, v := range d.AuditBundle {
		col := NewColumn(v)

		// Skip bundles that do not have the requested label
		if len(d.Flags.Label) > 0 && !v.FoundLabel {
			continue
		}
		allColumns = append(allColumns, *col)
	}

	sort.Slice(allColumns, func(i, j int) bool {
		return allColumns[i].PackageName < allColumns[j].PackageName
	})

	finalReport := Report{}
	finalReport.Flags = d.Flags
	finalReport.Columns = allColumns
	finalReport.IndexImageInspect = d.IndexImageInspect

	dt := time.Now().Format("2006-01-02")
	finalReport.GenerateAt = dt

	if len(allColumns) == 0 {
		log.Fatal("No data was found for the given criteria. " +
			"Please ensure that the values provided are valid.")
	}

	return finalReport
}
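
// OutputReport writes the prepared report in the requested output format;
// only JSON output is currently supported.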
func (d *Data) OutputReport() error {
	report := d.PrepareReport()

	switch d.Flags.OutputFormat {
	case pkg.JSON:
		if err := report.writeJSON(); err != nil {
			return err
		}
	default:
		return fmt.Errorf("invalid output format: %s", d.Flags.OutputFormat)
	}
	return nil
}
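
// Hypothetical driver showing how the pieces above could fit together. This is
// only a sketch, not part of the original code: it assumes the FBC for the
// index image has already been extracted to ./output/<tag>/configs, and that
// index.Data exposes the Flags fields used above (IndexImage, OutputFormat).
// The image reference is a placeholder.
func runFIPSReport() error {
	report := index.Data{}
	report.Flags.IndexImage = "registry.example.com/some-index:v4.14" // placeholder
	report.Flags.OutputFormat = pkg.JSON

	// Build the audit data from the file-based catalog.
	report, err := GetDataFromFBC(report)
	if err != nil {
		return err
	}

	// Render the report (JSON is the only format handled above).
	return report.OutputReport()
}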