
Commit 300360b

Amndeep7 and jbarayuga authored
Dependency track mapper (#3004)
* Add converter for Dependency-Track FPF files
* Linting fixes
* Update test files for latest mapper update
* Update test files
* SonarQube says that these ought to be readonly
* Added checkInput step, made output formatted, transitioned to using the extension of BaseCommand, fixed help text
* Updated readme
* Removed unused import
* Fixed indentation in readme

Signed-off-by: Amndeep Singh Mann <amann@mitre.org>
Co-authored-by: Jace Barayuga <jbarayuga@referentia.com>
1 parent 42ab38e · commit 300360b

15 files changed: +14629 −1 lines

README.md

Lines changed: 24 additions & 0 deletions
@@ -49,6 +49,7 @@ The SAF CLI is the successor to [Heimdall Tools](https://github.com/mitre/heimda
 * [CKL to POA&amp;M](#ckl-to-poam)
 * [CycloneDX SBOM to HDF](#cyclonedx-sbom-to-hdf)
 * [DBProtect to HDF](#dbprotect-to-hdf)
+* [Dependency-Track to HDF](#dependency-track-to-hdf)
 * [Fortify to HDF](#fortify-to-hdf)
 * [gosec to HDF](#gosec-to-hdf)
 * [Ion Channel 2 HDF](#ion-channel-2-hdf)
@@ -714,6 +715,29 @@ convert dbprotect2hdf Translate a DBProtect report in "Check Results
 $ saf convert dbprotect2hdf -i check_results_details_report.xml -o output-hdf-name.json
 ```
 
+[top](#convert-other-formats-to-hdf)
+##### Dependency-Track to HDF
+```
+convert dependency_track2hdf    Translate a Dependency-Track results JSON
+                                file into a Heimdall Data Format JSON file
+USAGE
+  $ saf convert dependency_track2hdf -i <dt-fpf-json> -o <hdf-scan-results-json> [-h] [-w]
+
+FLAGS
+  -h, --help           Show CLI help.
+  -i, --input=<value>  (required) Input Dependency-Track FPF file
+  -o, --output=<value> (required) Output HDF file
+  -w, --with-raw
+
+GLOBAL FLAGS
+  -L, --logLevel=<option>  [default: info] Specify level for logging (if implemented by the CLI command)
+                           <options: info|warn|debug|verbose>
+  --interactive            Collect input tags interactively (not available on all CLI commands)
+
+EXAMPLES
+  saf convert dependency_track2hdf -i dt-fpf.json -o output-hdf-name.json
+```
+
 [top](#convert-other-formats-to-hdf)
 #### Fortify to HDF
 ```
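Note that the help text added above leaves `-w, --with-raw` undescribed. The new command (next file in this diff) simply forwards that flag as the mapper's second constructor argument, which by its name suggests it carries the raw Dependency-Track data along into the HDF output; that reading is an inference, not something the help text states. Under that assumption, the documented example with raw passthrough enabled would be:

```
saf convert dependency_track2hdf -i dt-fpf.json -o output-hdf-name.json -w
```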
src/commands/convert/dependency_track2hdf.ts

Lines changed: 32 additions & 0 deletions
@@ -0,0 +1,32 @@
import {Flags} from '@oclif/core'
import fs from 'fs'
import {DependencyTrackMapper as Mapper} from '@mitre/hdf-converters'
import {checkInput, checkSuffix} from '../../utils/global'
import {BaseCommand} from '../../utils/oclif/baseCommand'

export default class DependencyTrack2HDF extends BaseCommand<typeof DependencyTrack2HDF> {
  static readonly usage = '<%= command.id %> -i <dt-fpf-json> -o <hdf-scan-results-json> [-h] [-w]'

  static readonly description = 'Translate a Dependency-Track results JSON file into a Heimdall Data Format JSON file'

  static readonly examples = ['<%= config.bin %> <%= command.id %> -i dt-fpf.json -o output-hdf-name.json']

  static readonly flags = {
    input: Flags.string({char: 'i', required: true, description: 'Input Dependency-Track FPF file'}),
    output: Flags.string({char: 'o', required: true, description: 'Output HDF file'}),
    'with-raw': Flags.boolean({char: 'w', required: false}),
  }

  async run() {
    const {flags} = await this.parse(DependencyTrack2HDF)
    const data = fs.readFileSync(flags.input, 'utf8')
    checkInput(
      {data, filename: flags.input},
      'dependencyTrack',
      'Dependency-Track results JSON',
    )

    const converter = new Mapper(data, flags['with-raw'])
    fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf(), null, 2))
  }
}
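For readers who want the conversion without the CLI wrapper, the command above boils down to three calls that are all visible in this diff: read the FPF file, construct the mapper, and serialize `toHdf()`. Below is a minimal sketch along those lines, assuming the published `@mitre/hdf-converters` package and reusing the file names from the README example; the `false` second argument mirrors the `-w/--with-raw` flag.

```typescript
// Sketch only (not part of this commit): drive the mapper directly,
// mirroring what the dependency_track2hdf command does above.
import fs from 'fs'
import {DependencyTrackMapper} from '@mitre/hdf-converters'

const fpfData = fs.readFileSync('dt-fpf.json', 'utf8')    // Dependency-Track FPF export (name taken from the README example)
const mapper = new DependencyTrackMapper(fpfData, false)  // second argument mirrors -w/--with-raw
fs.writeFileSync('output-hdf-name.json', JSON.stringify(mapper.toHdf(), null, 2))
```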

src/commands/convert/index.ts

Lines changed: 11 additions & 1 deletion
@@ -6,6 +6,7 @@ import {
   ConveyorResults,
   CycloneDXSBOMResults,
   DBProtectMapper,
+  DependencyTrackMapper,
   fingerprint,
   FortifyMapper,
   JfrogXrayMapper,
@@ -84,7 +85,7 @@ export default class Convert extends BaseCommand<typeof Convert> {
   }
 
   // catch all other cases:
-  // 'anchoregrype', 'burp', 'conveyor' 'checklist', 'dbProtect', 'fortify',
+  // 'anchoregrype', 'burp', 'conveyor' 'checklist', 'dbProtect', 'dependencyTrack', 'fortify',
   // 'jfrog', 'msft_secure_score', 'nessus', 'netsparker', 'neuvector' 'nikto',
   // 'prisma', 'sarif', 'cyclonedx_sbom', 'scoutsuite', 'snyk', 'trufflehog',
   // 'twistlock', 'xccdf'
@@ -176,6 +177,15 @@ export default class Convert extends BaseCommand<typeof Convert> {
         break
       }
 
+      case 'dependencyTrack': {
+        converter = new DependencyTrackMapper(fs.readFileSync(flags.input, 'utf8'))
+        fs.writeFileSync(
+          checkSuffix(flags.output),
+          JSON.stringify(converter.toHdf(), null, 2),
+        )
+        break
+      }
+
       case 'cyclonedx_sbom': {
         converter = new CycloneDXSBOMResults(
           fs.readFileSync(flags.input, 'utf8'),
Lines changed: 186 additions & 0 deletions
@@ -0,0 +1,186 @@
import {expect, test} from '@oclif/test'
import tmp from 'tmp'
import path from 'path'
import fs from 'fs'
import {omitHDFChangingFields} from '../utils'

describe('Test Dependency-Track', () => {
  const tmpobj = tmp.dirSync({unsafeCleanup: true})

  test
    .stdout()
    .command([
      'convert dependency_track2hdf',
      '-i',
      path.resolve(
        './test/sample_data/dependency_track/sample_input_report/fpf-default.json',
      ),
      '-o',
      `${tmpobj.name}/dependencytracktest.json`,
    ])
    .it('hdf-converter output test', () => {
      const converted = JSON.parse(
        fs.readFileSync(`${tmpobj.name}/dependencytracktest.json`, 'utf8'),
      )
      const sample = JSON.parse(
        fs.readFileSync(
          path.resolve('./test/sample_data/dependency_track/hdf-default.json'),
          'utf8',
        ),
      )
      expect(omitHDFChangingFields(converted)).to.eql(
        omitHDFChangingFields(sample),
      )
    })
})

describe('Test Dependency-Track withraw flag', () => {
  const tmpobj = tmp.dirSync({unsafeCleanup: true})

  test
    .stdout()
    .command([
      'convert dependency_track2hdf',
      '-i',
      path.resolve(
        './test/sample_data/dependency_track/sample_input_report/fpf-default.json',
      ),
      '-o',
      `${tmpobj.name}/dependencytracktest.json`,
      '-w',
    ])
    .it('hdf-converter withraw output test', () => {
      const converted = JSON.parse(
        fs.readFileSync(`${tmpobj.name}/dependencytracktest.json`, 'utf8'),
      )
      const sample = JSON.parse(
        fs.readFileSync(
          path.resolve('./test/sample_data/dependency_track/hdf-default-withraw.json'),
          'utf8',
        ),
      )
      expect(omitHDFChangingFields(converted)).to.eql(
        omitHDFChangingFields(sample),
      )
    })
})

describe('Test Dependency-Track optional attributes (e.g. vulnerability.cwes, analysis.state, etc.)', () => {
  const tmpobj = tmp.dirSync({unsafeCleanup: true})

  test
    .stdout()
    .command([
      'convert dependency_track2hdf',
      '-i',
      path.resolve(
        './test/sample_data/dependency_track/sample_input_report/fpf-optional-attributes.json',
      ),
      '-o',
      `${tmpobj.name}/dependencytracktest.json`,
    ])
    .it('hdf-converter output test', () => {
      const converted = JSON.parse(
        fs.readFileSync(`${tmpobj.name}/dependencytracktest.json`, 'utf8'),
      )
      const sample = JSON.parse(
        fs.readFileSync(
          path.resolve('./test/sample_data/dependency_track/hdf-optional-attributes.json'),
          'utf8',
        ),
      )
      expect(omitHDFChangingFields(converted)).to.eql(
        omitHDFChangingFields(sample),
      )
    })
})

describe('Test Dependency-Track no vulnerabilities', () => {
  const tmpobj = tmp.dirSync({unsafeCleanup: true})

  test
    .stdout()
    .command([
      'convert dependency_track2hdf',
      '-i',
      path.resolve(
        './test/sample_data/dependency_track/sample_input_report/fpf-no-vulnerabilities.json',
      ),
      '-o',
      `${tmpobj.name}/dependencytracktest.json`,
    ])
    .it('hdf-converter output test', () => {
      const converted = JSON.parse(
        fs.readFileSync(`${tmpobj.name}/dependencytracktest.json`, 'utf8'),
      )
      const sample = JSON.parse(
        fs.readFileSync(
          path.resolve('./test/sample_data/dependency_track/hdf-no-vulnerabilities.json'),
          'utf8',
        ),
      )
      expect(omitHDFChangingFields(converted)).to.eql(
        omitHDFChangingFields(sample),
      )
    })
})

describe('Test Dependency-Track with attributions', () => {
  const tmpobj = tmp.dirSync({unsafeCleanup: true})

  test
    .stdout()
    .command([
      'convert dependency_track2hdf',
      '-i',
      path.resolve(
        './test/sample_data/dependency_track/sample_input_report/fpf-with-attributions.json',
      ),
      '-o',
      `${tmpobj.name}/dependencytracktest.json`,
    ])
    .it('hdf-converter output test', () => {
      const converted = JSON.parse(
        fs.readFileSync(`${tmpobj.name}/dependencytracktest.json`, 'utf8'),
      )
      const sample = JSON.parse(
        fs.readFileSync(
          path.resolve('./test/sample_data/dependency_track/hdf-with-attributions.json'),
          'utf8',
        ),
      )
      expect(omitHDFChangingFields(converted)).to.eql(
        omitHDFChangingFields(sample),
      )
    })
})

describe('Test Dependency-Track info vulnerability', () => {
  const tmpobj = tmp.dirSync({unsafeCleanup: true})

  test
    .stdout()
    .command([
      'convert dependency_track2hdf',
      '-i',
      path.resolve(
        './test/sample_data/dependency_track/sample_input_report/fpf-info-vulnerability.json',
      ),
      '-o',
      `${tmpobj.name}/dependencytracktest.json`,
    ])
    .it('hdf-converter output test', () => {
      const converted = JSON.parse(
        fs.readFileSync(`${tmpobj.name}/dependencytracktest.json`, 'utf8'),
      )
      const sample = JSON.parse(
        fs.readFileSync(
          path.resolve('./test/sample_data/dependency_track/hdf-info-vulnerability.json'),
          'utf8',
        ),
      )
      expect(omitHDFChangingFields(converted)).to.eql(
        omitHDFChangingFields(sample),
      )
    })
})
