Skip to content

Commit

Permalink
Merge pull request #122 from RADAR-base/release-0.3.3
Browse files Browse the repository at this point in the history
Release 0.3.3
  • Loading branch information
yatharthranjan authored May 15, 2018
2 parents 71f28c3 + e5b797c commit 6151cb4
Show file tree
Hide file tree
Showing 21 changed files with 235 additions and 28 deletions.
12 changes: 6 additions & 6 deletions commons/active/thincit/thinc_it_pdq.avsc
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
{ "name": "time", "type": "double", "doc": "Timestamp in UTC (s) when the test is submitted to the subject." },
{ "name": "timeCompleted", "type": "double", "doc": "Timestamp in UTC (s) when the subject completes the test." },
{ "name": "score", "type": "int", "doc": "THINC-it index score - 0 to 4000." },
{ "name": "questionOne", "type": ["null",
{ "name": "questionOne", "type":
{
"type": "enum",
"name": "ThincItPdqQuestion",
Expand All @@ -19,11 +19,11 @@
"OFTEN",
"VERYOFTEN"
]
}], "doc": "PDQ5 Question 1.", "default": null},
{ "name": "questionTwo", "type": ["null", "ThincItPdqQuestion"], "doc": "PDQ5 Question 2.", "default": null},
{ "name": "questionThree", "type": ["null", "ThincItPdqQuestion"], "doc": "PDQ5 Question 3.", "default": null},
{ "name": "questionFour", "type": ["null", "ThincItPdqQuestion"], "doc": "PDQ5 Question 4.", "default": null},
{ "name": "questionFive", "type": ["null", "ThincItPdqQuestion"], "doc": "PDQ5 Question 5.", "default": null},
}, "doc": "PDQ5 Question 1."},
{ "name": "questionTwo", "type": "ThincItPdqQuestion", "doc": "PDQ5 Question 2."},
{ "name": "questionThree", "type": "ThincItPdqQuestion", "doc": "PDQ5 Question 3."},
{ "name": "questionFour", "type": "ThincItPdqQuestion", "doc": "PDQ5 Question 4."},
{ "name": "questionFive", "type": "ThincItPdqQuestion", "doc": "PDQ5 Question 5."},
{ "name": "appVersion", "type": "int", "doc": "App version." }
]
}
13 changes: 13 additions & 0 deletions commons/connector/fitbit/fitbit_intraday_heart_rate.avsc
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
{
"namespace": "org.radarcns.connector.fitbit",
"type": "record",
"name": "FitbitIntradayHeartRate",
"doc": "Intra day heart rate data from fitbit device.",
"fields": [
{ "name": "time", "type": "double", "doc": "Device timestamp in UTC (s)." },
{ "name": "timeReceived", "type": "double", "doc": "Time that the data was received from the Fitbit API (seconds since the Unix Epoch)." },
{ "name": "timeInterval", "type": "int", "doc": "Chronological window size (s)." },
{ "name": "timezoneOffset", "type": "int", "doc": "Offset from UTC (s)." },
{ "name": "heartRate", "type": "int", "doc":"Heart rate value (bpm)."}
]
}
13 changes: 13 additions & 0 deletions commons/connector/fitbit/fitbit_intraday_steps.avsc
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
{
"namespace": "org.radarcns.connector.fitbit",
"type": "record",
"name": "FitbitIntradaySteps",
"doc": "Intra day steps data from fitbit device.",
"fields": [
{ "name": "time", "type": "double", "doc": "Device timestamp in UTC (s)." },
{ "name": "timeReceived", "type": "double", "doc": "Time that the data was received from the Fitbit API (seconds since the Unix Epoch)." },
{ "name": "timeInterval", "type": "int", "doc": "Chronological window size (s)." },
{ "name": "timezoneOffset", "type": "int", "doc": "Offset from UTC (s)." },
{ "name": "steps", "type": "int", "doc":"Steps taken in this period."}
]
}
13 changes: 13 additions & 0 deletions commons/connector/fitbit/fitbit_sleep_pattern.avsc
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
{
"namespace": "org.radarcns.connector.fitbit",
"type": "record",
"name": "FitbitSleepPattern",
"doc": "Classic sleep data as defined at https://dev.fitbit.com/build/reference/web-api/sleep/.",
"fields": [
{ "name": "time", "type": "double", "doc": "Device timestamp in UTC (s)." },
{ "name": "timeReceived", "type": "double", "doc": "Time that the data was received from the Fitbit API (seconds since the Unix Epoch)." },
{ "name": "level", "type": "string", "doc": "Level of sleep, 'awake', 'restless', or 'asleep'." },
{ "name": "timezoneOffset", "type": "int", "doc": "Offset from UTC (s)." },
{ "name": "sleepDuration", "type": "int", "doc":"Duration at this sleep characteristic in seconds." }
]
}
13 changes: 13 additions & 0 deletions commons/connector/fitbit/fitbit_sleep_stage.avsc
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
{
"namespace": "org.radarcns.connector.fitbit",
"type": "record",
"name": "FitbitSleepStage",
"doc": "Fitbit 'stages' sleep data as defined at https://dev.fitbit.com/build/reference/web-api/sleep/.",
"fields": [
{ "name": "time", "type": "double", "doc": "Device timestamp in UTC (s)." },
{ "name": "timeReceived", "type": "double", "doc": "Time that the data was received from the Fitbit API (seconds since the Unix Epoch)." },
{ "name": "level", "type": "string", "doc": "Level of sleep, 'deep', 'light', 'rem', or 'awake'." },
{ "name": "timezoneOffset", "type": "int", "doc": "Offset from UTC (s)." },
{ "name": "sleepDuration", "type": "int", "doc":"Duration at this sleep characteristic in seconds." }
]
}
11 changes: 11 additions & 0 deletions commons/monitor/questionnaire/questionnaire_completion_log.avsc
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
{
"namespace": "org.radarcns.monitor.questionnaire",
"type": "record",
"name": "QuestionnaireCompletionLog",
"doc": "Schema for reporting the completion status of a questionnaire. This will help in calculating the compliance.",
"fields": [
{ "name": "time", "type": "double", "doc": "Timestamp in UTC (s) when the questionnaire completion log is submitted." },
{ "name": "name", "type": "string", "doc": "Questionnaire name." },
{ "name": "completionPercentage", "type": [ "null", "double"], "doc": "Percentage of the questionnaire completed. 0 for not at all complete and 100 for full completion. Null if no completion value possible." , "default": null }
]
}
2 changes: 1 addition & 1 deletion java-sdk/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ subprojects {
apply plugin: 'idea'

// Configuration
version = '0.3.2'
version = '0.3.3'
group = 'org.radarcns'
ext.githubRepoName = 'RADAR-CNS/RADAR-Schemas'

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
import java.util.Locale;

public enum Scope {
ACTIVE, KAFKA, CATALOGUE, MONITOR, PASSIVE, STREAM;
ACTIVE, KAFKA, CATALOGUE, MONITOR, PASSIVE, STREAM, CONNECTOR;

private final String lower;

Expand Down
1 change: 1 addition & 0 deletions java-sdk/radar-schemas-tools/config/pmd/ruleset.xml
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@

<rule ref="rulesets/java/strings.xml">
<exclude name="ConsecutiveAppendsShouldReuse"/>
<exclude name="AvoidDuplicateLiterals"/>
</rule>

<rule ref="rulesets/java/unusedcode.xml">
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,8 @@ public List<String> getRawTopics() {
return Stream.of(
catalogue.getPassiveSources(),
catalogue.getActiveSources(),
catalogue.getMonitorSources())
catalogue.getMonitorSources(),
catalogue.getConnectorSources())
.flatMap(map -> map.values().stream())
.flatMap(DataProducer::getTopicNames)
.sorted()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,6 @@
import java.net.MalformedURLException;
import java.util.Optional;
import java.util.regex.Pattern;
import java.util.stream.Stream;

import static org.radarcns.schema.CommandLineApp.matchTopic;

Expand Down Expand Up @@ -73,16 +72,13 @@ public SchemaRegistry(String baseUrl) throws MalformedURLException {
}

/**
* Register all schemas in a source catalogue. Stream sources are ignored.
* Register all schemas in a source catalogue. Stream and connector sources are ignored.
* @param catalogue schema catalogue to read schemas from
* @return whether all schemas were successfully registered.
*/
public boolean registerSchemas(SourceCatalogue catalogue) {
return Stream.of(
catalogue.getActiveSources(),
catalogue.getPassiveSources(),
catalogue.getMonitorSources())
.flatMap(m -> m.values().stream())
return catalogue.getSources().stream()
.filter(DataProducer::doRegisterSchema)
.flatMap(DataProducer::getTopics)
.allMatch(this::registerSchema);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,10 @@

import static org.radarcns.schema.util.Utils.applyOrEmpty;

/**
* A producer of data to Kafka, generally mapping to a source.
* @param <T> type of data that is produced.
*/
public abstract class DataProducer<T extends DataTopic> {
@JsonProperty @NotBlank
private String name;
Expand All @@ -26,6 +30,14 @@ public abstract class DataProducer<T extends DataTopic> {
@JsonProperty
private List<String> labels;

/**
* If true, register the schema during kafka initialization, otherwise, the producer should do
* that itself. The default is true, set in the constructor of subclasses to use a different
* default.
*/
@JsonProperty("register_schema")
protected boolean registerSchema = true;

public String getName() {
return name;
}
Expand Down Expand Up @@ -56,6 +68,9 @@ public Stream<String> getTopicNames() {
return getData().stream().flatMap(applyOrEmpty(DataTopic::getTopics));
}

public boolean doRegisterSchema() {
return registerSchema;
}

@Override
public boolean equals(Object o) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
import java.nio.file.InvalidPathException;
import org.radarcns.schema.Scope;
import org.radarcns.schema.specification.active.ActiveSource;
import org.radarcns.schema.specification.connector.ConnectorSource;
import org.radarcns.schema.specification.stream.StreamGroup;
import org.radarcns.schema.specification.monitor.MonitorSource;
import org.radarcns.schema.specification.passive.PassiveSource;
Expand Down Expand Up @@ -59,6 +60,7 @@ public class SourceCatalogue {
private final Map<String, ActiveSource<?>> activeSources;
private final Map<String, MonitorSource> monitorSources;
private final Map<String, PassiveSource> passiveSources;
private final Map<String, ConnectorSource> connectorSources;
private final Map<String, StreamGroup> streamGroups;

private final Set<DataProducer<?>> sources;
Expand All @@ -67,18 +69,21 @@ public class SourceCatalogue {
SourceCatalogue(Map<String, ActiveSource<?>> activeSources,
Map<String, MonitorSource> monitorSources,
Map<String, PassiveSource> passiveSources,
Map<String, StreamGroup> streamGroups) {
Map<String, StreamGroup> streamGroups,
Map<String, ConnectorSource> connectorSources) {
this.activeSources = activeSources;
this.monitorSources = monitorSources;
this.passiveSources = passiveSources;
this.streamGroups = streamGroups;
this.connectorSources = connectorSources;

sources = new HashSet<>();

sources.addAll(activeSources.values());
sources.addAll(monitorSources.values());
sources.addAll(passiveSources.values());
sources.addAll(streamGroups.values());
sources.addAll(connectorSources.values());
}

/**
Expand Down Expand Up @@ -107,7 +112,8 @@ public static SourceCatalogue load(Path root) throws IOException {
initSources(mapper.readerFor(ActiveSource.class), specRoot, Scope.ACTIVE),
initSources(mapper.readerFor(MonitorSource.class), specRoot, Scope.MONITOR),
initSources(mapper.readerFor(PassiveSource.class), specRoot, Scope.PASSIVE),
initSources(mapper.readerFor(StreamGroup.class), specRoot, Scope.STREAM));
initSources(mapper.readerFor(StreamGroup.class), specRoot, Scope.STREAM),
initSources(mapper.readerFor(ConnectorSource.class), specRoot, Scope.CONNECTOR));
}

private static <T> Map<String, T> initSources(ObjectReader reader, Path root, Scope scope)
Expand Down Expand Up @@ -207,6 +213,14 @@ public Stream<String> getTopicNames() {
.flatMap(DataProducer::getTopicNames);
}

    /**
     * Get the connector sources in this catalogue.
     * @return the connector sources, as loaded from the specification directory.
     */
public Map<String, ConnectorSource> getConnectorSources() {
return connectorSources;
}

/** Get all topics in the catalogue. */
public Stream<AvroTopic<?, ?>> getTopics() {
return sources.stream()
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
package org.radarcns.schema.specification.connector;

import com.fasterxml.jackson.annotation.JsonProperty;
import org.radarcns.schema.Scope;
import org.radarcns.schema.specification.DataProducer;
import org.radarcns.schema.specification.DataTopic;

import java.util.List;

/**
 * Specification of a data producer that feeds Kafka through a third-party connector.
 * Schema registration is turned off by default, because Kafka Connect registers its own
 * schemas; set the {@code register_schema} property to {@code true} to have the schemas
 * auto-registered anyway.
 */
public class ConnectorSource extends DataProducer<DataTopic> {
    /** Topics that this connector produces data to. */
    @JsonProperty
    private List<DataTopic> data;

    /** Create a connector source with schema auto-registration disabled. */
    public ConnectorSource() {
        registerSchema = false;
    }

    @Override
    public Scope getScope() {
        return Scope.CONNECTOR;
    }

    @Override
    public List<DataTopic> getData() {
        return data;
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,10 @@
import org.radarcns.stream.TimeWindowMetadata;
import org.radarcns.topic.AvroTopic;

/**
* Topic used for Kafka Streams.
*/
public class StreamDataTopic extends DataTopic {

/** Whether the stream is a windowed stream with standard TimeWindow windows. */
@JsonProperty
private boolean windowed = false;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,13 +8,22 @@
import java.util.List;
import java.util.stream.Stream;

/**
 * Data producer for Kafka Streams. This data topic does not register schemas to the schema registry
 * by default, since Kafka Streams will do that itself. To enable registration anyway, set the
 * {@code register_schema} property to {@code true}.
 */
public class StreamGroup extends DataProducer<StreamDataTopic> {
@JsonProperty @NotEmpty
private List<StreamDataTopic> data;

@JsonProperty
private String master;

public StreamGroup() {
registerSchema = false;
}

@Override
public List<StreamDataTopic> getData() {
return data;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,19 +9,29 @@
public interface SchemaRules {
SchemaFieldRules getFieldRules();

/** Checks that schemas are unique compared to already validated schemas. */
/**
* Checks that schemas are unique compared to already validated schemas.
*/
Validator<Schema> validateUniqueness();

/** Checks schema namespace format. */
/**
* Checks schema namespace format.
*/
Validator<Schema> validateNameSpace();

/** Checks schema name format. */
/**
* Checks schema name format.
*/
Validator<Schema> validateName();

/** Checks schema documentation presence and format. */
/**
* Checks schema documentation presence and format.
*/
Validator<Schema> validateSchemaDocumentation();

/** Checks that the symbols of enums have the required format. */
/**
* Checks that the symbols of enums have the required format.
*/
Validator<Schema> validateSymbols();

/**
Expand Down Expand Up @@ -49,7 +59,9 @@ public interface SchemaRules {
*/
Validator<Schema> validateNotTimeReceived();

/** Validate an enum. */
/**
* Validate an enum.
*/
default Validator<Schema> validateEnum() {
return validateUniqueness()
.and(validateNameSpace())
Expand All @@ -58,7 +70,9 @@ default Validator<Schema> validateEnum() {
.and(validateName());
}

/** Validate a record that is defined inline. */
/**
* Validate a record that is defined inline.
*/
default Validator<Schema> validateRecord() {
return validateUniqueness()
.and(validateNameSpace())
Expand All @@ -69,6 +83,7 @@ default Validator<Schema> validateRecord() {

/**
* Validates record schemas of an active source.
*
 * @return validator that checks the record schemas of an active source.
*/
default Validator<Schema> validateActiveSource() {
Expand All @@ -80,6 +95,7 @@ default Validator<Schema> validateActiveSource() {

/**
* Validates schemas of monitor sources.
*
 * @return validator that checks the schemas of monitor sources.
*/
default Validator<Schema> validateMonitor() {
Expand Down
Loading

0 comments on commit 6151cb4

Please sign in to comment.