fixes from pre-commit run: typos and graphql formatting (#211)
pre-commit run --show-diff-on-failure --color=always --all-files
bwiggs authored Nov 15, 2024
1 parent 9881822 commit 05aa003
Showing 8 changed files with 7 additions and 11 deletions.
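
These fixes can be reproduced locally by running the same hooks across the whole tree. A minimal sequence, assuming pre-commit is installed from PyPI (the repository may also expose it through its own task runner):

pip install pre-commit
pre-commit run --show-diff-on-failure --color=always --all-files

The --show-diff-on-failure flag makes pre-commit print the diff whenever a hook rewrites files, which is how changes like the ones below surface.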
2 changes: 1 addition & 1 deletion docker/docker-compose-riverboat.yml
@@ -6,7 +6,7 @@ services:
image: ghcr.io/theopenlane/riverboat:arm64-78-90832455
container_name: riverboat
environment:
-# this needs to use host.docker.internal since it is a seperate docker-compose file
+# this needs to use host.docker.internal since it is a separate docker-compose file
- RIVERBOAT_JOBQUEUE_DATABASEHOST=postgres://postgres:password@host.docker.internal:5432/jobs?sslmode=disable
- RIVERBOAT_RUNMIGRATIONS=true
command:
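
Because riverboat is started from a separate compose file, it does not share a default network with the postgres service, so the service name cannot be resolved directly; host.docker.internal points back at the host, where the connection string above expects postgres to be published on port 5432. On Docker Desktop that hostname exists out of the box; on a plain Linux engine it usually has to be mapped explicitly, roughly as in the sketch below (not part of this file, shown only for context):

extra_hosts:
  - "host.docker.internal:host-gateway"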
2 changes: 1 addition & 1 deletion pkg/objects/objects.go
@@ -140,7 +140,7 @@ type File struct {
ContentType string `json:"content_type,omitempty"`
// Size in bytes of the uploaded file
Size int64 `json:"size,omitempty"`
-// Metdata is a map of key value pairs that can be used to store additional information about the file
+// Metadata is a map of key value pairs that can be used to store additional information about the file
Metadata map[string]string `json:"metadata,omitempty"`
// Bucket is the bucket that the file is stored in
Bucket string `json:"bucket,omitempty"`
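
Metadata is a free-form string map attached to an uploaded file. A minimal sketch of populating it, using only the fields visible in this hunk (File has additional fields not shown here, and the import path is omitted):

f := objects.File{
	ContentType: "text/csv",
	Size:        2048,
	Metadata:    map[string]string{"uploaded_by": "bulk-import"}, // arbitrary key/value annotations
	Bucket:      "uploads",
}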
2 changes: 1 addition & 1 deletion pkg/objects/options.go
@@ -95,7 +95,7 @@ func WithDownloadFileOptions(opts *DownloadFileOptions) Option {
}
}

-func WithMetdata(mp map[string]interface{}) Option {
+func WithMetadata(mp map[string]interface{}) Option {
return func(o *Objects) {
if o.UploadFileOptions.Metadata == nil {
o.UploadFileOptions.Metadata = map[string]string{}
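
WithMetadata returns an Option, a closure over *Objects, that seeds UploadFileOptions.Metadata (hence the nil check above). A hedged usage sketch follows; objects.New is a hypothetical constructor name used only for illustration, since the package's real entry point is not part of this diff:

opts := []objects.Option{
	objects.WithMetadata(map[string]interface{}{
		"team":   "platform",   // illustrative keys only
		"source": "api-upload",
	}),
}
// assumed: a constructor that accepts variadic Options and applies each to the Objects it builds
store, err := objects.New(opts...)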
3 changes: 2 additions & 1 deletion query/feature.graphql
@@ -1,4 +1,3 @@

mutation CreateBulkCSVFeature($input: Upload!) {
createBulkCSVFeature(input: $input) {
features {
@@ -86,6 +85,7 @@ query GetAllFeatures {
}
}
}

query GetFeatureByID($featureId: ID!) {
feature(id: $featureId) {
createdAt
@@ -125,6 +125,7 @@ query GetFeatures($where: FeatureWhereInput) {
}
}
}

mutation UpdateFeature($updateFeatureId: ID!, $input: UpdateFeatureInput!) {
updateFeature(id: $updateFeatureId, input: $input) {
feature {
2 changes: 0 additions & 2 deletions query/internalpolicyhistory.graphql
@@ -1,5 +1,3 @@


query GetAllInternalPolicyHistories {
internalPolicyHistories {
edges {
2 changes: 0 additions & 2 deletions query/organizationhistory.graphql
@@ -1,5 +1,3 @@


query GetAllOrganizationHistories {
organizationHistories {
edges {
3 changes: 2 additions & 1 deletion query/standard.graphql
@@ -1,4 +1,3 @@

mutation CreateBulkCSVStandard($input: Upload!) {
createBulkCSVStandard(input: $input) {
standards {
@@ -98,6 +97,7 @@ query GetAllStandards {
}
}
}

query GetStandardByID($standardId: ID!) {
standard(id: $standardId) {
background
@@ -143,6 +143,7 @@ query GetStandards($where: StandardWhereInput) {
}
}
}

mutation UpdateStandard($updateStandardId: ID!, $input: UpdateStandardInput!) {
updateStandard(id: $updateStandardId, input: $input) {
standard {
2 changes: 0 additions & 2 deletions query/usersettinghistory.graphql
@@ -1,5 +1,3 @@


query GetAllUserSettingHistories {
userSettingHistories {
edges {
