docs(site): fix quickstart and update examples
fhussonnois committed May 12, 2021
1 parent 1988cc8 commit cfabaa5
Showing 5 changed files with 85 additions and 97 deletions.
examples/connect-file-pulse-example-override-topic-and-key.json (23 additions & 26 deletions)

```diff
@@ -1,27 +1,24 @@
 {
-  "config": {
-    "connector.class": "io.streamthoughts.kafka.connect.filepulse.source.FilePulseSourceConnector",
-    "filters": "ParseDelimitedRow, SetTopic, SetKey",
-    "filters.SetTopic.value": "replace_all(lowercase($.type), '\\s','-')",
-    "filters.SetTopic.field": "$topic",
-    "filters.SetTopic.type": "io.streamthoughts.kafka.connect.filepulse.filter.AppendFilter",
-    "filters.SetKey.value": "{{ lowercase($.artist) }}-{{ lowercase($.title) }}",
-    "filters.SetKey.field": "$key",
-    "filters.SetKey.type": "io.streamthoughts.kafka.connect.filepulse.filter.AppendFilter",
-    "filters.ParseDelimitedRow.extractColumnName": "headers",
-    "filters.ParseDelimitedRow.trimColumn": "true",
-    "filters.ParseDelimitedRow.type": "io.streamthoughts.kafka.connect.filepulse.filter.DelimitedRowFilter",
-    "fs.cleanup.policy.class": "io.streamthoughts.kafka.connect.filepulse.clean.LogCleanupPolicy",
-    "fs.listing.class": "io.streamthoughts.kafka.connect.filepulse.fs.LocalFSDirectoryListing",
-    "fs.listing.directory.path": "/tmp/kafka-connect/examples/",
-    "fs.listing.interval.ms": "10000",
-    "internal.kafka.reporter.bootstrap.servers": "broker:29092",
-    "internal.kafka.reporter.topic": "connect-file-pulse-status",
-    "offset.attributes.string": "name+hash",
-    "skip.headers": "1",
-    "topic": "connect-file-pulse-quickstart-csv",
-    "tasks.reader.class": "io.streamthoughts.kafka.connect.filepulse.fs.reader.LocalRowFileInputReader",
-    "tasks.max": 1
-  },
-  "name": "connect-file-pulse-quickstart-csv"
-}
+  "connector.class": "io.streamthoughts.kafka.connect.filepulse.source.FilePulseSourceConnector",
+  "filters": "ParseDelimitedRow, SetTopic, SetKey",
+  "filters.SetTopic.value": "replace_all(lowercase($.type), '\\s','-')",
+  "filters.SetTopic.field": "$topic",
+  "filters.SetTopic.type": "io.streamthoughts.kafka.connect.filepulse.filter.AppendFilter",
+  "filters.SetKey.value": "{{ lowercase($.artist) }}-{{ lowercase($.title) }}",
+  "filters.SetKey.field": "$key",
+  "filters.SetKey.type": "io.streamthoughts.kafka.connect.filepulse.filter.AppendFilter",
+  "filters.ParseDelimitedRow.extractColumnName": "headers",
+  "filters.ParseDelimitedRow.trimColumn": "true",
+  "filters.ParseDelimitedRow.type": "io.streamthoughts.kafka.connect.filepulse.filter.DelimitedRowFilter",
+  "fs.cleanup.policy.class": "io.streamthoughts.kafka.connect.filepulse.clean.LogCleanupPolicy",
+  "fs.listing.class": "io.streamthoughts.kafka.connect.filepulse.fs.LocalFSDirectoryListing",
+  "fs.listing.directory.path": "/tmp/kafka-connect/examples/",
+  "fs.listing.interval.ms": "10000",
+  "internal.kafka.reporter.bootstrap.servers": "broker:29092",
+  "internal.kafka.reporter.topic": "connect-file-pulse-status",
+  "offset.attributes.string": "name+hash",
+  "skip.headers": "1",
+  "topic": "connect-file-pulse-quickstart-csv",
+  "tasks.reader.class": "io.streamthoughts.kafka.connect.filepulse.fs.reader.LocalRowFileInputReader",
+  "tasks.max": 1
+}
```
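All four example files in this commit drop the `{ "name": ..., "config": { ... } }` wrapper and keep only the bare configuration object. That is the payload shape expected by Kafka Connect's idempotent `PUT /connectors/{name}/config` endpoint, which the updated quickstart (last file below) switches to; only `POST /connectors` needs the wrapper. A minimal deployment sketch, assuming the quickstart's Connect REST API is reachable on localhost:8083:

```bash
# Sketch only: register (or update) the connector using the flattened example.
# PUT /connectors/{name}/config takes the bare config object, so the old
# {"name": ..., "config": {...}} wrapper is no longer needed; the connector
# name now comes from the URL.
curl -sX PUT http://localhost:8083/connectors/connect-file-pulse-quickstart-csv/config \
    -d @examples/connect-file-pulse-example-override-topic-and-key.json \
    --header "Content-Type: application/json" | jq
```

Because PUT is idempotent, re-running the quickstart updates the existing connector instead of failing as POST does when the connector already exists.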
examples/connect-file-pulse-quickstart-avro.json (12 additions & 15 deletions)

```diff
@@ -1,17 +1,14 @@
 {
-  "config": {
-    "connector.class": "io.streamthoughts.kafka.connect.filepulse.source.FilePulseSourceConnector",
-    "fs.cleanup.policy.class": "io.streamthoughts.kafka.connect.filepulse.clean.LogCleanupPolicy",
-    "fs.listing.class" : "io.streamthoughts.kafka.connect.filepulse.fs.LocalFSDirectoryListing",
-    "fs.listing.directory.path":"/tmp/kafka-connect/examples/",
-    "fs.listing.interval.ms": "10000",
-    "internal.kafka.reporter.bootstrap.servers": "broker:29092",
-    "internal.kafka.reporter.topic": "connect-file-pulse-status",
-    "offset.attributes.string": "name",
-    "read.max.wait.ms": "5000",
-    "topic": "connect-file-pulse-quickstart-avro",
-    "tasks.reader.class": "io.streamthoughts.kafka.connect.filepulse.fs.reader.LocalAvroFileInputReader",
-    "tasks.max": 1
-  },
-  "name": "connect-file-pulse-quickstart-avro"
+  "connector.class": "io.streamthoughts.kafka.connect.filepulse.source.FilePulseSourceConnector",
+  "fs.cleanup.policy.class": "io.streamthoughts.kafka.connect.filepulse.clean.LogCleanupPolicy",
+  "fs.listing.class" : "io.streamthoughts.kafka.connect.filepulse.fs.LocalFSDirectoryListing",
+  "fs.listing.directory.path":"/tmp/kafka-connect/examples/",
+  "fs.listing.interval.ms": "10000",
+  "internal.kafka.reporter.bootstrap.servers": "broker:29092",
+  "internal.kafka.reporter.topic": "connect-file-pulse-status",
+  "offset.attributes.string": "name",
+  "read.max.wait.ms": "5000",
+  "topic": "connect-file-pulse-quickstart-avro",
+  "tasks.reader.class": "io.streamthoughts.kafka.connect.filepulse.fs.reader.LocalAvroFileInputReader",
+  "tasks.max": 1
 }
```
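Once deployed, the Avro example can be sanity-checked against the REST API, and its output topic read back with the Schema Registry console consumer. This is a verification sketch, not part of the commit; the host names (`localhost:8083`, `broker:29092`, `schema-registry:8081`) and the `connect` container are assumptions based on the Docker quickstart stack:

```bash
# Check that the connector and its task are RUNNING.
curl -s http://localhost:8083/connectors/connect-file-pulse-quickstart-avro/status | jq

# Read back the records; kafka-avro-console-consumer ships with the Confluent
# Schema Registry tooling and may not be present in every Connect image.
docker exec -it connect kafka-avro-console-consumer \
    --topic connect-file-pulse-quickstart-avro \
    --from-beginning \
    --bootstrap-server broker:29092 \
    --property schema.registry.url=http://schema-registry:8081
```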
examples/connect-file-pulse-quickstart-csv.json (22 additions & 25 deletions)

```diff
@@ -1,26 +1,23 @@
 {
-  "config": {
-    "connector.class": "io.streamthoughts.kafka.connect.filepulse.source.FilePulseSourceConnector",
-    "filters": "ParseDelimitedRow, Drop",
-    "filters.Drop.if": "{{ equals($value.artist, 'U2') }}",
-    "filters.Drop.invert": "true",
-    "filters.Drop.type": "io.streamthoughts.kafka.connect.filepulse.filter.DropFilter",
-    "filters.ParseDelimitedRow.extractColumnName": "headers",
-    "filters.ParseDelimitedRow.trimColumn": "true",
-    "filters.ParseDelimitedRow.type": "io.streamthoughts.kafka.connect.filepulse.filter.DelimitedRowFilter",
-    "fs.cleanup.policy.class": "io.streamthoughts.kafka.connect.filepulse.clean.LogCleanupPolicy",
-    "fs.listing.class": "io.streamthoughts.kafka.connect.filepulse.fs.LocalFSDirectoryListing",
-    "fs.listing.directory.path":"/tmp/kafka-connect/examples/",
-    "fs.listing.filters":"io.streamthoughts.kafka.connect.filepulse.fs.filter.RegexFileListFilter",
-    "fs.listing.interval.ms": "10000",
-    "file.filter.regex.pattern":".*\\.csv$",
-    "internal.kafka.reporter.bootstrap.servers": "broker:29092",
-    "internal.kafka.reporter.topic": "connect-file-pulse-status",
-    "offset.attributes.string": "name+hash",
-    "skip.headers": "1",
-    "topic": "connect-file-pulse-quickstart-csv",
-    "tasks.reader.class": "io.streamthoughts.kafka.connect.filepulse.fs.reader.LocalRowFileInputReader",
-    "tasks.max": 1
-  },
-  "name": "connect-file-pulse-quickstart-csv"
-}
+  "connector.class": "io.streamthoughts.kafka.connect.filepulse.source.FilePulseSourceConnector",
+  "filters": "ParseDelimitedRow, Drop",
+  "filters.Drop.if": "{{ equals($value.artist, 'U2') }}",
+  "filters.Drop.invert": "true",
+  "filters.Drop.type": "io.streamthoughts.kafka.connect.filepulse.filter.DropFilter",
+  "filters.ParseDelimitedRow.extractColumnName": "headers",
+  "filters.ParseDelimitedRow.trimColumn": "true",
+  "filters.ParseDelimitedRow.type": "io.streamthoughts.kafka.connect.filepulse.filter.DelimitedRowFilter",
+  "fs.cleanup.policy.class": "io.streamthoughts.kafka.connect.filepulse.clean.LogCleanupPolicy",
+  "fs.listing.class": "io.streamthoughts.kafka.connect.filepulse.fs.LocalFSDirectoryListing",
+  "fs.listing.directory.path":"/tmp/kafka-connect/examples/",
+  "fs.listing.filters":"io.streamthoughts.kafka.connect.filepulse.fs.filter.RegexFileListFilter",
+  "fs.listing.interval.ms": "10000",
+  "file.filter.regex.pattern":".*\\.csv$",
+  "internal.kafka.reporter.bootstrap.servers": "broker:29092",
+  "internal.kafka.reporter.topic": "connect-file-pulse-status",
+  "offset.attributes.string": "name+hash",
+  "skip.headers": "1",
+  "topic": "connect-file-pulse-quickstart-csv",
+  "tasks.reader.class": "io.streamthoughts.kafka.connect.filepulse.fs.reader.LocalRowFileInputReader",
+  "tasks.max": 1
+}
```
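In this config, `skip.headers=1` swallows the first line of each file and `extractColumnName=headers` reuses it as column names, while `Drop` with `invert=true` keeps only the rows whose `artist` field equals `U2`. A hypothetical dataset illustrating the chain, assuming `DelimitedRowFilter`'s default `;` separator; the file name `musics.csv` is illustrative but must match `file.filter.regex.pattern`:

```bash
# Hypothetical data: the header row names the columns, the 'Dire Straits' row
# is discarded by the inverted Drop filter, and the 'U2' row is produced to
# the connect-file-pulse-quickstart-csv topic.
cat > musics.csv <<'EOF'
title;artist;type
With Or Without You;U2;Pop/Rock
Money For Nothing;Dire Straits;Pop/Rock
EOF
docker cp musics.csv connect://tmp/kafka-connect/examples/musics.csv
```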
examples/connect-file-pulse-quickstart-log4j.json (23 additions & 26 deletions)

```diff
@@ -1,28 +1,25 @@
 {
-  "config": {
-    "connector.class": "io.streamthoughts.kafka.connect.filepulse.source.FilePulseSourceConnector",
-    "filters": "GroupMultilineException, ParseLog4jLog",
-    "filters.GroupMultilineException.negate": "false",
-    "filters.GroupMultilineException.pattern": "^[\\t]",
-    "filters.GroupMultilineException.type": "io.streamthoughts.kafka.connect.filepulse.filter.MultiRowFilter",
-    "filters.ParseLog4jLog.match": "%{TIMESTAMP_ISO8601:logdate} %{LOGLEVEL:loglevel} %{GREEDYDATA:message}",
-    "filters.ParseLog4jLog.overwrite": "message",
-    "filters.ParseLog4jLog.source": "message",
-    "filters.ParseLog4jLog.type": "io.streamthoughts.kafka.connect.filepulse.filter.GrokFilter",
-    "filters.ParseLog4jLog.ignoreFailure": "true",
-    "fs.cleanup.policy.class": "io.streamthoughts.kafka.connect.filepulse.clean.LogCleanupPolicy",
-    "fs.listing.class": "io.streamthoughts.kafka.connect.filepulse.fs.LocalFSDirectoryListing",
-    "fs.listing.directory.path": "/tmp/kafka-connect/examples/",
-    "fs.listing.filters": "io.streamthoughts.kafka.connect.filepulse.fs.filter.RegexFileListFilter",
-    "fs.listing.interval.ms": "10000",
-    "file.filter.regex.pattern":".*\\.log$",
-    "internal.kafka.reporter.bootstrap.servers": "broker:29092",
-    "internal.kafka.reporter.topic": "connect-file-pulse-status",
-    "offset.attributes.string": "name",
-    "read.max.wait.ms": "5000",
-    "topic": "connect-file-pulse-quickstart-log4j",
-    "tasks.reader.class": "io.streamthoughts.kafka.connect.filepulse.fs.reader.LocalRowFileInputReader",
-    "tasks.max": 1
-  },
-  "name": "connect-file-pulse-quickstart-log4j"
+  "connector.class": "io.streamthoughts.kafka.connect.filepulse.source.FilePulseSourceConnector",
+  "filters": "GroupMultilineException, ParseLog4jLog",
+  "filters.GroupMultilineException.negate": "false",
+  "filters.GroupMultilineException.pattern": "^[\\t]",
+  "filters.GroupMultilineException.type": "io.streamthoughts.kafka.connect.filepulse.filter.MultiRowFilter",
+  "filters.ParseLog4jLog.pattern": "%{TIMESTAMP_ISO8601:logdate} %{LOGLEVEL:loglevel} %{GREEDYDATA:message}",
+  "filters.ParseLog4jLog.overwrite": "message",
+  "filters.ParseLog4jLog.source": "message",
+  "filters.ParseLog4jLog.type": "io.streamthoughts.kafka.connect.filepulse.filter.GrokFilter",
+  "filters.ParseLog4jLog.ignoreFailure": "true",
+  "fs.cleanup.policy.class": "io.streamthoughts.kafka.connect.filepulse.clean.LogCleanupPolicy",
+  "fs.listing.class": "io.streamthoughts.kafka.connect.filepulse.fs.LocalFSDirectoryListing",
+  "fs.listing.directory.path": "/var/log/kafka/",
+  "fs.listing.filters": "io.streamthoughts.kafka.connect.filepulse.fs.filter.RegexFileListFilter",
+  "fs.listing.interval.ms": "10000",
+  "file.filter.regex.pattern":".*\\.log$",
+  "internal.kafka.reporter.bootstrap.servers": "broker:29092",
+  "internal.kafka.reporter.topic": "connect-file-pulse-status",
+  "offset.attributes.string": "name",
+  "read.max.wait.ms": "5000",
+  "topic": "connect-file-pulse-quickstart-log4j",
+  "tasks.reader.class": "io.streamthoughts.kafka.connect.filepulse.fs.reader.LocalRowFileInputReader",
+  "tasks.max": 1
 }
```
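Besides the unwrapping, this file renames the Grok property `match` to `pattern` and moves the listing directory from `/tmp/kafka-connect/examples/` to `/var/log/kafka/`. For illustration, a hypothetical line the Grok expression would split into `logdate`, `loglevel`, and `message` fields; tab-indented continuation lines (stack traces) are first folded into the previous record by `MultiRowFilter`:

```bash
# Hypothetical log content, appended inside the assumed 'connect' container so
# that it falls under the new listing path and matches the .*\.log$ regex.
docker exec -it connect bash -c \
  'echo "2021-05-12 10:15:30,123 ERROR Unexpected error while reading file" >> /var/log/kafka/quickstart.log'
```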
site/content/en/docs/Getting started/_index.md (5 additions & 5 deletions)

````diff
@@ -48,9 +48,9 @@ This example starts a new connector instance to parse the Kafka Connect container…
 **1 ) Start a new connector instance**
 
 ```bash
-$ curl -sSL $GITHUB_REPO_MASTER/config/connect-file-pulse-quickstart-log4j.json -o connect-file-pulse-quickstart-log4j.json
+$ curl -sSL $GITHUB_REPO_MASTER/examples/connect-file-pulse-quickstart-log4j.json -o connect-file-pulse-quickstart-log4j.json
 
-$ curl -sX POST http://localhost:8083/connectors \
+$ curl -sX PUT http://localhost:8083/connectors/connect-file-pulse-quickstart-log4j/config \
 -d @connect-file-pulse-quickstart-log4j.json \
 --header "Content-Type: application/json" | jq
 ```
@@ -110,17 +110,17 @@ This example starts a new connector instance that parse a CSV file and filter rows…
 **1 ) Start a new connector instance**
 
 ```bash
-$ curl -sSL $GITHUB_REPO_MASTER/config/connect-file-pulse-quickstart-csv.json -o connect-file-pulse-quickstart-csv.json
+$ curl -sSL $GITHUB_REPO_MASTER/examples/connect-file-pulse-quickstart-csv.json -o connect-file-pulse-quickstart-csv.json
 
-$ curl -sX POST http://localhost:8083/connectors \
+$ curl -sX PUT http://localhost:8083/connectors/connect-file-pulse-quickstart-csv/config \
 -d @connect-file-pulse-quickstart-csv.json \
 --header "Content-Type: application/json" | jq
 ```
 
 **2 ) Copy example csv file into container**
 
 ```bash
-$ curl -sSL $GITHUB_REPO_MASTER/examples/quickstart-musics-dataset.csv -o quickstart-musics-dataset.csv
+$ curl -sSL $GITHUB_REPO_MASTER/datasets/quickstart-musics-dataset.csv -o quickstart-musics-dataset.csv
 $ docker exec -it connect mkdir -p /tmp/kafka-connect/examples
 $ docker cp quickstart-musics-dataset.csv connect://tmp/kafka-connect/examples/quickstart-musics-dataset.csv
 ```
````
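As a final check after the updated quickstart, the ingested CSV records can be read back from the output topic; the `connect` container and the `broker:29092` address are assumptions carried over from the Docker example:

```bash
# Sketch: print the first records produced by the CSV quickstart.
docker exec -it connect kafka-console-consumer \
    --topic connect-file-pulse-quickstart-csv \
    --from-beginning \
    --max-messages 5 \
    --bootstrap-server broker:29092
```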
