feat(asset): add asset data source with utilization query #52
@@ -0,0 +1,105 @@
import { render } from '@testing-library/react'
import { FetchError } from '@grafana/runtime';
import { act } from 'react-dom/test-utils';
import { FloatingError, parseErrorMessage } from './errors';
import { SystemLinkError } from '../datasources/asset/types';
import React from 'react';

test('renders with error message', () => {
  const { container } = render(<FloatingError message='error msg'/>)

  expect(container.innerHTML).toBeTruthy() // refact: get by text
})

test('does not render without error message', () => {
  const { container } = render(<FloatingError message=''/>)

  expect(container.innerHTML).toBeFalsy() // refact: get by text
})

test('hides after timeout', () => {
  jest.useFakeTimers();

  const { container } = render(<FloatingError message='error msg'/>)
  act(() => jest.runAllTimers())

  expect(container.innerHTML).toBeFalsy()
})

test('parses error message', () => {
  const errorMock: Error = {
    name: 'error',
    message: 'error message'
  }

  const result = parseErrorMessage(errorMock)

  expect(result).toBe(errorMock.message)
})

test('parses fetch error message', () => {
  const fetchErrorMock: FetchError = {
    status: 404,
    data: { message: 'error message' },
    config: { url: 'URL' }
  }

  const result = parseErrorMessage(fetchErrorMock as any)

  expect(result).toBe(fetchErrorMock.data.message)
})

test('parses fetch error status text', () => {
  const fetchErrorMock: FetchError = {
    status: 404,
    data: {},
    statusText: 'statusText',
    config: { url: 'URL' }
  }

  const result = parseErrorMessage(fetchErrorMock as any)

  expect(result).toBe(fetchErrorMock.statusText)
})

test('parses SystemLink error code', () => {
  const systemLinkError: SystemLinkError = {
    error: {
      name: 'name',
      args: [],
      code: -255130,
      message: 'error message'
    }
  }
  const fetchErrorMock: FetchError = {
    status: 404,
    data: systemLinkError,
    statusText: 'statusText',
    config: { url: 'URL' }
  }

  const result = parseErrorMessage(fetchErrorMock as any)

  expect(result).toBe(fetchErrorMock.statusText)
})

test('parses SystemLink error message', () => {
  const systemLinkError: SystemLinkError = {
    error: {
      name: 'name',
      args: [],
      code: 123,
      message: 'error message'
    }
  }
  const fetchErrorMock: FetchError = {
    status: 404,
    data: systemLinkError,
    statusText: 'statusText',
    config: { url: 'URL' }
  }

  const result = parseErrorMessage(fetchErrorMock as any)

  expect(result).toBe(fetchErrorMock.statusText)
})
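Taken together, these assertions pin down the parseErrorMessage contract: a plain Error yields its message, a FetchError yields the message field of its response body when present, and otherwise its HTTP status text. A minimal sketch consistent with the assertions above (hypothetical; the module under test may distinguish FetchError differently and may handle SystemLink payloads specially):

import { FetchError } from '@grafana/runtime';

// Hypothetical type guard; the real module may use a different check.
function isFetchError(error: Error | FetchError): error is FetchError {
  return 'status' in error && 'data' in error;
}

export function parseErrorMessage(error: Error): string {
  if (isFetchError(error)) {
    // Prefer the response body's message, otherwise fall back to the HTTP status text.
    return error.data?.message || error.statusText || '';
  }
  return error.message;
}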
@@ -0,0 +1,115 @@
import { BackendSrv } from "@grafana/runtime";
import { MockProxy } from "jest-mock-extended";
import {
  createFetchError,
  createFetchResponse,
  getQueryBuilder,
  mockTimers,
  peakDaysMock,
  requestMatching,
  setupDataSource,
  assetModelMock,
} from "test/fixtures";
import { AssetDataSource } from "./AssetDataSource";
import {
  AssetsResponse,
  AssetQueryType,
  AssetQuery,
  EntityType,
  IsNIAsset,
  IsPeak,
  UtilizationCategory,
  TimeFrequency, PolicyOption
} from "./types";
import { dateTime } from "@grafana/data";

let ds: AssetDataSource, backendSrv: MockProxy<BackendSrv>

beforeEach(() => {
  [ds, backendSrv] = setupDataSource(AssetDataSource);
});

const assetUtilizationQueryMock: AssetQuery = {
  assetQueryType: AssetQueryType.METADATA,
  workspace: '',
  entityType: EntityType.ASSET,
  isPeak: IsPeak.NONPEAK,
  peakDays: peakDaysMock,
  refId: '',
  utilizationCategory: UtilizationCategory.TEST,
  assetIdentifier: '321',
  isNIAsset: IsNIAsset.NIASSET,
  minionId: '123',
  timeFrequency: TimeFrequency.DAILY,
  peakStart: dateTime(new Date(2024, 1, 1, 9, 0)),
  nonPeakStart: dateTime(new Date(2024, 1, 1, 17, 0)),
  policyOption: PolicyOption.DEFAULT
}

const dataFrameDTOMock = [
  { name: 'model name', values: [''] },
  { name: 'serial number', values: [''] },
  { name: 'bus type', values: ['USB'] },
  { name: 'asset type', values: ['DEVICE_UNDER_TEST'] },
  { name: 'is NI asset', values: [true] },
  {
    name: 'calibration status',
    values: ['APPROACHING_RECOMMENDED_DUE_DATE']
  },
  { name: 'is system controller', values: [true] },
  { name: 'workspace', values: [''] },
  { name: 'last updated timestamp', values: [''] },
  { name: 'minionId', values: ['minion1'] },
  { name: 'parent name', values: [''] },
  { name: 'system name', values: ['system1'] },
  {
    name: 'calibration due date',
    values: ['2019-05-07T18:58:05.000Z']
  }
]

const buildQuery = getQueryBuilder<AssetQuery>()({});

mockTimers();
Review comment on mockTimers(): This isn't necessary; you only need this if you're testing code that relies on
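For reference, a fixture like mockTimers usually just wraps Jest's fake timers for the whole file. A hypothetical sketch (the actual helper in test/fixtures may differ):

// Hypothetical sketch, not the actual test/fixtures code: run every test in the
// file on Jest's fake timers and restore real timers afterwards.
export function mockTimers() {
  beforeEach(() => {
    jest.useFakeTimers();
  });
  afterEach(() => {
    jest.useRealTimers();
  });
}

None of the tests below advance timers, so the call can likely be dropped.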
describe('testDatasource', () => {
  test('returns success', async () => {
    backendSrv.fetch
      .calledWith(requestMatching({ url: '/niapm/v1/assets' }))
      .mockReturnValue(createFetchResponse(25));

    const result = await ds.testDatasource();

    expect(result.status).toEqual('success');
  });

  test('bubbles up exception', async () => {
    backendSrv.fetch
      .calledWith(requestMatching({ url: '/niapm/v1/assets' }))
      .mockReturnValue(createFetchError(400));

    await expect(ds.testDatasource()).rejects.toHaveProperty('status', 400);
  });
})

describe('queries', () => {
  test('runs metadata query', async () => {
    const queryAssets = backendSrv.fetch
      .calledWith(requestMatching({ url: '/niapm/v1/query-assets' }))
      .mockReturnValue(createFetchResponse({ assets: assetModelMock, totalCount: 0 } as AssetsResponse))

    const result = await ds.query(buildQuery(assetUtilizationQueryMock))

    expect(result.data[0].fields).toEqual(expect.arrayContaining(dataFrameDTOMock))
    expect(queryAssets).toHaveBeenCalledTimes(1)
  })

  test('handles query error', async () => {
    backendSrv.fetch
      .calledWith(requestMatching({ url: '/niapm/v1/query-assets' }))
      .mockReturnValue(createFetchError(418))

    await expect(ds.query(buildQuery(assetUtilizationQueryMock))).rejects.toThrow()
  })
})
Review comment: We should have tests that verify the output of the utilization queries. There's a lot of logic in the data source for building up filters and parsing responses that needs coverage. You can use Jest's snapshot testing to make it really easy to assert on the output; that's what we do in the tag data source.
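The referenced tag data source snippet isn't captured above. As an illustration only, a snapshot assertion against the metadata query already mocked in this file could look like the sketch below; the same pattern would apply to utilization responses once their endpoints are mocked:

// Sketch: snapshot the data frames produced by the data source so that field
// names, types, and parsed values are all covered by a single assertion.
test('metadata query output matches snapshot', async () => {
  backendSrv.fetch
    .calledWith(requestMatching({ url: '/niapm/v1/query-assets' }))
    .mockReturnValue(createFetchResponse({ assets: assetModelMock, totalCount: 0 } as AssetsResponse));

  const result = await ds.query(buildQuery(assetUtilizationQueryMock));

  expect(result.data).toMatchSnapshot();
});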
I would also want to see a test that verifies that we replace template variables in queries correctly.
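One way to cover that, sketched under the assumption that the template service mock wired up by setupDataSource performs the substitution (the fixture behaviour assumed here may differ from the actual test/fixtures implementation):

// Sketch: issue a query containing a dashboard variable and assert that the raw
// '$minion' token never reaches the backend request.
test('replaces template variables before querying', async () => {
  backendSrv.fetch
    .calledWith(requestMatching({ url: '/niapm/v1/query-assets' }))
    .mockReturnValue(createFetchResponse({ assets: assetModelMock, totalCount: 0 } as AssetsResponse));

  await ds.query(buildQuery({ ...assetUtilizationQueryMock, minionId: '$minion' }));

  const requests = backendSrv.fetch.mock.calls.map(([request]) => JSON.stringify(request));
  expect(requests.join()).not.toContain('$minion');
});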
Thanks for writing these tests!