Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

HTML Export Support #1689

Open
wants to merge 19 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
33 changes: 33 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ The SAF CLI is the successor to [Heimdall Tools](https://github.com/mitre/heimda
* [HDF to Checklist](#hdf-to-checklist)
* [HDF to CSV](#hdf-to-csv)
* [HDF to Condensed JSON](#hdf-to-condensed-json)
* [HDF to HTML](#hdf-to-html)

### Convert Other Formats to HDF

Expand Down Expand Up @@ -435,6 +436,38 @@ convert hdf2condensed Condensed format used by some community members
$ saf convert hdf2condensed -i rhel7-results.json -o rhel7-condensed.json
```
[top](#convert-hdf-to-other-formats)
#### HDF to HTML
```
convert hdf2html Translate an HDF file into a Heimdall Report HTML file

USAGE
$ saf convert hdf2html -i <hdf-scan-results-json>... -o <output-html> [-t <output-type>] [-h]

FLAGS
-h, --help
Show CLI help.

-i, --input=<value>...
(required) Input HDF JSON file

-o, --output=<value>
(required) Output HTML file

-t, --type=<option>
[default: Administrator] The report type to generate
Report types differ with the information they include
Executive: Profile Info + Statuses + Compliance Level
Manager: Executive + Test Results and Details
Administrator: Manager + Test Code
<options: executive|manager|administrator>

DESCRIPTION
Translate an HDF file into a Heimdall Report HTML file

EXAMPLES
$ saf convert hdf2html -i hdf_input.json -o report.html -t manager
```
[top](#convert-hdf-to-other-formats)

---
### Convert To HDF
Expand Down
48 changes: 48 additions & 0 deletions src/commands/convert/hdf2html.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
import {Command, Flags} from '@oclif/core'
import fs from 'fs'
import path from 'path'
import {FromHDFToHTMLMapper as Mapper} from '@mitre/hdf-converters'
import _ from 'lodash'

// All selectable export types for an HTML export.
// Each level includes everything from the level above it (per the -t flag help):
//   Executive     — profile info + statuses + compliance level
//   Manager       — Executive + test results and details
//   Administrator — Manager + test code
// String values are Title-cased because run() maps the lowercase CLI option
// back onto these via _.startCase.
enum FileExportTypes {
  Executive = 'Executive',
  Manager = 'Manager',
  Administrator = 'Administrator'
}

export default class HDF2HTML extends Command {
static usage = 'convert hdf2html -i <hdf-scan-results-json>... -o <output-html> [-t <output-type>] [-h]'

static description = 'Translate an HDF file into a Heimdall Report HTML file'

static examples = ['saf convert hdf2html -i hdf_input.json -o report.html -t manager']

static flags = {
help: Flags.help({char: 'h'}),
input: Flags.string({char: 'i', required: true, multiple: true, description: 'Input HDF JSON file'}),
output: Flags.string({char: 'o', required: true, description: 'Output HTML file'}),
type: Flags.string({char: 't', default: FileExportTypes.Administrator,
description: 'The report type to generate\nReport types differ with the information they include\nExecutive: Profile Info + Statuses + Compliance Level\nManager: Executive + Test Results and Details\nAdministrator: Manager + Test Code',
options: ['executive', 'manager', 'administrator']}),
}

async run() {
const {flags} = await this.parse(HDF2HTML)

const files = []

let i = 0
for (const file of flags.input) {
// Create unique fileID for html reference
const fileID = `${i++}`

const data = fs.readFileSync(file, 'utf8')
const fileName = path.basename(file)
files.push({data, fileName, fileID})
}

const converter = await new Mapper(files, _.startCase(flags.type) as FileExportTypes).toHTML()

Check failure on line 45 in src/commands/convert/hdf2html.ts

View workflow job for this annotation

GitHub Actions / build

Expected 1 arguments, but got 0.
fs.writeFileSync(flags.output, converter)
}
}
2 changes: 1 addition & 1 deletion src/commands/convert/splunk2hdf.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import {Command, Flags} from '@oclif/core'
import {SplunkMapper} from '@mitre/hdf-converters/lib/src/splunk-mapper'
import {SplunkMapper} from '@mitre/hdf-converters'
import {table} from 'table'
import {createWinstonLogger} from '../../utils/logging'
import _ from 'lodash'
Expand Down
45 changes: 45 additions & 0 deletions test/commands/convert/hdf2html.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
import {expect, test} from '@oclif/test'
import tmp from 'tmp'
import path from 'path'
import fs from 'fs'
import { omitHtmlChangingfields as omitHtmlChangingFields } from '../utils'

// Default report type: no -t flag is passed, so the command should fall back to
// the 'administrator' report. Version banners that churn with dependency
// upgrades are stripped (omitHtmlChangingFields) before diffing the generated
// HTML against the committed sample.
describe('Test hdf2html triple_overlay_profile_example with default (administrator) report type', () => {
  // Scratch directory; unsafeCleanup lets it be removed even while non-empty.
  const tmpobj = tmp.dirSync({unsafeCleanup: true})

  test
    .stdout()
    .command(['convert hdf2html', '-i', path.resolve('./test/sample_data/HDF/input/triple_overlay_profile_example.json'), '-o', `${tmpobj.name}/triple_overlay_profile_example.html`])
    .it('hdf-converter output test', () => {
      const converted = omitHtmlChangingFields(fs.readFileSync(`${tmpobj.name}/triple_overlay_profile_example.html`, 'utf8'))
      const sample = omitHtmlChangingFields(fs.readFileSync(path.resolve('./test/sample_data/html/triple_overlay_profile_example.htm'), 'utf8'))
      expect(converted).to.eql(sample)
    })
})

// Multi-input path: two HDF files are passed to a single -i flag and should be
// merged into one combined 'manager'-type report, compared against the
// committed combined sample after version banners are stripped.
describe('Test hdf2html with manager report type and two input files', () => {
  // Scratch directory; unsafeCleanup lets it be removed even while non-empty.
  const tmpobj = tmp.dirSync({unsafeCleanup: true})

  test
    .stdout()
    .command(['convert hdf2html', '-i', path.resolve('./test/sample_data/HDF/input/red_hat_good.json'), path.resolve('./test/sample_data/HDF/input/vSphere8_report.json'), '-o', `${tmpobj.name}/combined_output.html`, '-t', 'manager'])
    .it('hdf-converter output test', () => {
      const converted = omitHtmlChangingFields(fs.readFileSync(`${tmpobj.name}/combined_output.html`, 'utf8'))
      const sample = omitHtmlChangingFields(fs.readFileSync(path.resolve('./test/sample_data/html/combined_output.htm'), 'utf8'))
      expect(converted).to.eql(sample)
    })
})

// Explicit 'executive' report type for a single input file, diffed against the
// committed sample with churn-prone version banners stripped.
describe('Test hdf2html with executive report type', () => {
  // Scratch directory; unsafeCleanup lets it be removed even while non-empty.
  const tmpobj = tmp.dirSync({unsafeCleanup: true})

  test
    .stdout()
    .command(['convert hdf2html', '-i', path.resolve('./test/sample_data/HDF/input/red_hat_good.json'), '-o', `${tmpobj.name}/red_hat_good.html`, '-t', 'executive'])
    .it('hdf-converter output test', () => {
      const converted = omitHtmlChangingFields(fs.readFileSync(`${tmpobj.name}/red_hat_good.html`, 'utf8'))
      const sample = omitHtmlChangingFields(fs.readFileSync(path.resolve('./test/sample_data/html/red_hat_good.htm'), 'utf8'))
      expect(converted).to.eql(sample)
    })
})

5 changes: 5 additions & 0 deletions test/commands/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -32,3 +32,8 @@ export function omitChecklistChangingFields(input: string) {
// remove UUIDs
return input.replaceAll(/[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}/gm, '')
}

/**
 * Strip dependency version banners from generated HTML so test comparisons do
 * not break every time tailwindcss or TW Elements is upgraded.
 *
 * NOTE(review): the exported name has a typo ('fields' lowercased); it is kept
 * for backward compatibility with existing importers, which alias it on import.
 */
export function omitHtmlChangingfields(input: string) {
  // Global flags so *every* occurrence of each banner is masked, matching the
  // replaceAll behaviour of omitChecklistChangingFields above — without /g,
  // a second occurrence would survive and fail the comparison.
  return input.replace(/(tailwindcss .+ \|)/g, 'TAILWIND VERSION')
    .replace(/TW Elements \d+\.\d+\.\d+/g, 'TW ELEMENTS VERSION')
}
Loading
Loading