
Commit
chore(deps): update ghcr.io/open-feature/flagd-testbed docker tag to v0.5.13 (#1068)

Signed-off-by: Todd Baert <todd.baert@dynatrace.com>
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Todd Baert <todd.baert@dynatrace.com>
renovate[bot] and toddbaert authored Oct 18, 2024
1 parent 6a84d05 commit 75c5b10
Showing 2 changed files with 30 additions and 20 deletions.
libs/providers/flagd/docker-compose.yaml: 2 changes (1 addition & 1 deletion)
@@ -1,6 +1,6 @@
 services:
   flagd:
-    image: ghcr.io/open-feature/flagd-testbed:v0.5.6
+    image: ghcr.io/open-feature/flagd-testbed:v0.5.13
     ports:
       - 8013:8013
   flagd-unstable:
@@ -22,26 +22,36 @@ export function flagdRecconnectUnstable() {
     });
   });
 
-  test('Provider reconnection', ({ given, when, then, and }) => {
-    given('a flagd provider is set', () => {
-      // handled in beforeAll
-    });
-    when('a PROVIDER_READY handler and a PROVIDER_ERROR handler are added', () => {
-      client.addHandler(ProviderEvents.Error, () => {
-        errorRunCount++;
-      });
-    });
-    then('the PROVIDER_READY handler must run when the provider connects', async () => {
-      // should already be at 1 from `beforeAll`
-      expect(readyRunCount).toEqual(1);
-    });
-    and("the PROVIDER_ERROR handler must run when the provider's connection is lost", async () => {
-      await new Promise((resolve) => setTimeout(resolve, 10000));
-      expect(errorRunCount).toBeGreaterThan(0);
-    });
-    and('when the connection is reestablished the PROVIDER_READY handler must run again', async () => {
-      await new Promise((resolve) => setTimeout(resolve, 10000));
-      expect(readyRunCount).toBeGreaterThan(1);
-    });
-  });
+  describe('retry', () => {
+    /**
+     * This describe block and retry settings are calibrated to gRPC's retry time
+     * and our testing container's restart cadence.
+     */
+    const retryTimes = 240;
+    const retryDelayMs = 1000;
+    jest.retryTimes(retryTimes);
+
+    test('Provider reconnection', ({ given, when, then, and }) => {
+      given('a flagd provider is set', () => {
+        // handled in beforeAll
+      });
+      when('a PROVIDER_READY handler and a PROVIDER_ERROR handler are added', () => {
+        client.addHandler(ProviderEvents.Error, () => {
+          errorRunCount++;
+        });
+      });
+      then('the PROVIDER_READY handler must run when the provider connects', async () => {
+        // should already be at 1 from `beforeAll`
+        expect(readyRunCount).toEqual(1);
+      });
+      and("the PROVIDER_ERROR handler must run when the provider's connection is lost", async () => {
+        await new Promise((resolve) => setTimeout(resolve, retryDelayMs));
+        expect(errorRunCount).toBeGreaterThan(0);
+      });
+      and('when the connection is reestablished the PROVIDER_READY handler must run again', async () => {
+        await new Promise((resolve) => setTimeout(resolve, retryDelayMs));
+        expect(readyRunCount).toBeGreaterThan(1);
+      });
+    });
+  });
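Note (not part of this commit): the new describe block leans on Jest's built-in test retries rather than a single long sleep. A minimal, self-contained sketch of that pattern, assuming the jest-circus runner (the default since Jest 27); the counter and test names below are illustrative only, not taken from this repository:

import { setTimeout as sleep } from 'node:timers/promises';

const retryTimes = 240;    // re-run a failing test up to 240 times
const retryDelayMs = 1000; // wait inside each attempt before asserting

// Registering retries at the top level applies them to every test in this file.
jest.retryTimes(retryTimes);

describe('retry sketch', () => {
  let attempts = 0; // hypothetical stand-in for an external condition (e.g. a reconnect)

  test('eventually passes once the awaited condition holds', async () => {
    attempts++;
    await sleep(retryDelayMs);            // give the external system time to settle between attempts
    expect(attempts).toBeGreaterThan(2);  // fails on the first two attempts, passes on the third
  });
});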


0 comments on commit 75c5b10
