Commit 858a86e: Merge branch 'development'

jachro committed Apr 5, 2023
2 parents f8d5e8c + bfb8041

Showing 26 changed files with 957 additions and 368 deletions.
@@ -0,0 +1,91 @@
/*
 * Copyright 2023 Swiss Data Science Center (SDSC)
 * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
 * Eidgenössische Technische Hochschule Zürich (ETHZ).
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.renku.entities.viewings.collector
package datasets

import cats.effect.Async
import cats.syntax.all._
import cats.MonadThrow
import eu.timepit.refined.auto._
import io.renku.entities.viewings.collector.persons.Dataset
import io.renku.graph.model.{datasets, projects}
import io.renku.triplesstore.{ProjectsConnectionConfig, SparqlQueryTimeRecorder, TSClient}
import org.typelevel.log4cats.Logger

private trait DSInfoFinder[F[_]] {
  def findDSInfo(identifier: datasets.Identifier): F[Option[DSInfo]]
}

private final case class DSInfo(projectPath: projects.Path, dataset: Dataset)

private object DSInfoFinder {
  def apply[F[_]: Async: Logger: SparqlQueryTimeRecorder]: F[DSInfoFinder[F]] =
    ProjectsConnectionConfig[F]().map(TSClient[F](_)).map(new DSInfoFinderImpl[F](_))
}

private class DSInfoFinderImpl[F[_]: MonadThrow](tsClient: TSClient[F]) extends DSInfoFinder[F] {

  import io.circe.Decoder
  import io.renku.graph.model.Schemas.{renku, schema}
  import io.renku.triplesstore.{ResultsDecoder, SparqlQuery}
  import io.renku.triplesstore.SparqlQuery.Prefixes
  import io.renku.triplesstore.client.syntax._
  import io.renku.triplesstore.ResultsDecoder._
  import tsClient._

  override def findDSInfo(identifier: datasets.Identifier): F[Option[DSInfo]] =
    findProjects(identifier)
      .map(findOldestProject)
      .map(_.map { case (path, _, dataset) => DSInfo(path, dataset) })

  private type Row = (projects.Path, projects.DateCreated, Dataset)

  private def findProjects(identifier: datasets.Identifier): F[List[Row]] = queryExpecting(
    SparqlQuery
      .ofUnsafe(
        show"${categoryName.show.toLowerCase}: find projects",
        Prefixes of (schema -> "schema", renku -> "renku"),
        sparql"""|SELECT ?path ?dateCreated ?dsId ?dsIdentifier
                 |WHERE {
                 |  GRAPH ?projectId {
                 |    BIND (${identifier.asObject} AS ?dsIdentifier)
                 |    ?dsId a schema:Dataset;
                 |          schema:identifier ?dsIdentifier.
                 |    ?projectId a schema:Project;
                 |               renku:projectPath ?path;
                 |               schema:dateCreated ?dateCreated.
                 |  }
                 |}
                 |""".stripMargin
      )
  )(rowsDecoder)

  private lazy val rowsDecoder: Decoder[List[Row]] = ResultsDecoder[List, Row] { implicit cur =>
    import io.renku.tinytypes.json.TinyTypeDecoders._
    for {
      path         <- extract[projects.Path]("path")
      date         <- extract[projects.DateCreated]("dateCreated")
      dsId         <- extract[datasets.ResourceId]("dsId")
      dsIdentifier <- extract[datasets.Identifier]("dsIdentifier")
    } yield (path, date, Dataset(dsId, dsIdentifier))
  }

  private lazy val findOldestProject: List[Row] => Option[Row] =
    _.sortBy(_._2).headOption
}
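
A minimal, self-contained sketch (plain Scala with simplified stand-in types; the paths, dates and dataset id are illustrative, not the real Renku tiny types) of the selection rule implemented by findOldestProject above: when a dataset identifier is found in several projects, the project with the earliest dateCreated is the one the viewing gets attributed to.

import java.time.Instant

// Simplified stand-in for the (projects.Path, projects.DateCreated, Dataset) row.
final case class Row(path: String, dateCreated: Instant, datasetId: String)

// Same rule as findOldestProject: sort by creation date, take the first.
val findOldestProject: List[Row] => Option[Row] =
  _.sortBy(_.dateCreated.toEpochMilli).headOption

val rows = List(
  Row("group/fork",     Instant.parse("2023-03-01T00:00:00Z"), "ds-1"),
  Row("group/original", Instant.parse("2022-01-01T00:00:00Z"), "ds-1")
)

// The dataset is attributed to the oldest project, here "group/original".
assert(findOldestProject(rows).map(_.path).contains("group/original"))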
@@ -16,12 +16,14 @@
* limitations under the License.
*/

package io.renku.entities.viewings.collector.datasets
package io.renku.entities.viewings.collector
package datasets

import cats.effect.Async
import cats.syntax.all._
import cats.MonadThrow
import cats.data.OptionT
import io.renku.entities.viewings.collector.persons.{GLUserViewedDataset, PersonViewedDatasetPersister}
import io.renku.entities.viewings.collector.projects.viewed.EventPersister
import io.renku.graph.model.projects
import io.renku.triplesgenerator.api.events.{DatasetViewedEvent, ProjectViewedEvent, UserId}
@@ -34,22 +34,39 @@ private trait EventUploader[F[_]] {

private object EventUploader {
def apply[F[_]: Async: Logger: SparqlQueryTimeRecorder]: F[EventUploader[F]] =
(ProjectFinder[F], EventPersister[F])
.mapN(new EventUploaderImpl[F](_, _))
(DSInfoFinder[F], EventPersister[F], PersonViewedDatasetPersister[F])
.mapN(new EventUploaderImpl[F](_, _, _))
}

private class EventUploaderImpl[F[_]: MonadThrow](
projectFinder: ProjectFinder[F],
eventPersister: EventPersister[F]
dsInfoFinder: DSInfoFinder[F],
projectViewedEventPersister: EventPersister[F],
personViewedDatasetPersister: PersonViewedDatasetPersister[F]
) extends EventUploader[F] {

import eventPersister._
import projectFinder._
import dsInfoFinder._

override def upload(event: DatasetViewedEvent): F[Unit] =
OptionT(findProject(event.identifier))
.map(ProjectViewedEvent(_, projects.DateViewed(event.dateViewed.value), event.maybeUserId.map(UserId(_))))
.semiflatMap(persist)
OptionT(findDSInfo(event.identifier))
.semiflatTap(persistProjectViewedEvent(_, event))
.semiflatTap(persistPersonViewedDataset(_, event))
.value
.void

private def persistProjectViewedEvent(dsInfo: DSInfo, event: DatasetViewedEvent) =
projectViewedEventPersister.persist(
ProjectViewedEvent(dsInfo.projectPath,
projects.DateViewed(event.dateViewed.value),
event.maybeUserId.map(UserId(_))
)
)

private def persistPersonViewedDataset(dsInfo: DSInfo, event: DatasetViewedEvent) =
event.maybeUserId match {
case None => ().pure[F]
case Some(glId) =>
personViewedDatasetPersister.persist(
GLUserViewedDataset(UserId(glId), dsInfo.dataset, event.dateViewed)
)
}
}
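
A rough, self-contained sketch (plain Scala, no cats-effect; the function parameters stand in for the injected persisters) of the control flow the reworked EventUploaderImpl follows: if the dataset identifier cannot be resolved to a project nothing is persisted, a ProjectViewedEvent is persisted once it is resolved, and the person-viewed-dataset record is persisted only when the event carries a user id.

import java.time.Instant

final case class DatasetViewedEvent(identifier: String, dateViewed: Instant, maybeUserId: Option[Long])
final case class DSInfo(projectPath: String, datasetId: String)

def upload(
    findDSInfo:            String => Option[DSInfo],
    persistProjectViewed:  DSInfo => Unit,
    persistPersonViewedDS: (Long, DSInfo) => Unit
)(event: DatasetViewedEvent): Unit =
  findDSInfo(event.identifier).foreach { dsInfo =>
    // Mirrors the OptionT(...).semiflatTap(...).semiflatTap(...) chain above.
    persistProjectViewed(dsInfo)
    event.maybeUserId.foreach(persistPersonViewedDS(_, dsInfo))
  }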

This file was deleted.

@@ -16,10 +16,10 @@
* limitations under the License.
*/

package io.renku.entities.viewings.collector.projects
package io.renku.entities.viewings.collector

import io.renku.graph.model.Schemas.{renku, xsd}
import io.renku.graph.model.entities.Project
import io.renku.graph.model.entities.{Dataset, Project}
import io.renku.jsonld.ontology.{Class, DataProperties, DataProperty, ObjectProperties, ObjectProperty, Type}
import io.renku.jsonld.Property

@@ -38,10 +38,12 @@ object PersonViewingOntology {

val classType: Property = renku / "PersonViewing"
val viewedProjectProperty: Property = renku / "viewedProject"
val viewedDatasetProperty: Property = renku / "viewedDataset"

lazy val typeDef: Type = Type.Def(
Class(classType),
ObjectProperty(viewedProjectProperty, PersonViewedProjectOntology.typeDef)
ObjectProperty(viewedProjectProperty, PersonViewedProjectOntology.typeDef),
ObjectProperty(viewedDatasetProperty, PersonViewedDatasetOntology.typeDef)
)
}

@@ -61,3 +63,20 @@ object PersonViewedProjectOntology {
)
)
}

object PersonViewedDatasetOntology {

val classType: Property = renku / "ViewedDataset"
val datasetProperty: Property = renku / "dataset"
val dateViewedProperty: DataProperty.Def = DataProperty(renku / "dateViewed", xsd / "dateTime")

lazy val typeDef: Type = Type.Def(
Class(classType),
ObjectProperties(
ObjectProperty(datasetProperty, Dataset.Ontology.typeDef)
),
DataProperties(
dateViewedProperty
)
)
}
@@ -16,41 +16,27 @@
* limitations under the License.
*/

package io.renku.entities.viewings.collector.projects
package io.renku.entities.viewings.collector
package persons

import io.renku.graph.model.{persons, projects, GraphClass}
import io.renku.graph.model.GraphClass
import io.renku.jsonld._
import io.renku.jsonld.syntax._

private object ProjectViewingEncoder {
private object Encoder {

final case class ProjectViewing(projectId: projects.ResourceId, dateViewed: projects.DateViewed)

final case class Project(id: projects.ResourceId, path: projects.Path)

final case class PersonViewedProject(userId: persons.ResourceId, project: Project, dateViewed: projects.DateViewed)

def encode(viewing: ProjectViewing): NamedGraph =
def encode(viewing: PersonViewedProject): NamedGraph =
NamedGraph.fromJsonLDsUnsafe(
GraphClass.ProjectViewedTimes.id,
GraphClass.PersonViewings.id,
viewing.asJsonLD
)

def encode(viewing: PersonViewedProject): NamedGraph =
def encode(viewing: PersonViewedDataset): NamedGraph =
NamedGraph.fromJsonLDsUnsafe(
GraphClass.PersonViewings.id,
viewing.asJsonLD
)

private implicit lazy val projectViewingEncoder: JsonLDEncoder[ProjectViewing] =
JsonLDEncoder.instance { case ProjectViewing(entityId, date) =>
JsonLD.entity(
entityId.asEntityId,
EntityTypes of ProjectViewedTimeOntology.classType,
ProjectViewedTimeOntology.dataViewedProperty.id -> date.asJsonLD
)
}

private implicit lazy val personViewedProjectEncoder: JsonLDEncoder[PersonViewedProject] =
JsonLDEncoder.instance { case ev @ PersonViewedProject(userId, _, _) =>
JsonLD.entity(
@@ -69,4 +55,23 @@ private object ProjectViewingEncoder {
PersonViewedProjectOntology.dateViewedProperty.id -> date.asJsonLD
)
}

private implicit lazy val personViewedDatasetEncoder: JsonLDEncoder[PersonViewedDataset] =
JsonLDEncoder.instance { case ev @ PersonViewedDataset(userId, _, _) =>
JsonLD.entity(
userId.asEntityId,
EntityTypes of PersonViewingOntology.classType,
PersonViewingOntology.viewedDatasetProperty -> ev.asJsonLD(viewedDatasetEncoder)
)
}

private lazy val viewedDatasetEncoder: JsonLDEncoder[PersonViewedDataset] =
JsonLDEncoder.instance { case PersonViewedDataset(userId, Dataset(id, identifier), date) =>
JsonLD.entity(
EntityId.of(s"$userId/datasets/$identifier"),
EntityTypes of PersonViewedProjectOntology.classType,
PersonViewedDatasetOntology.datasetProperty -> id.asJsonLD,
PersonViewedDatasetOntology.dateViewedProperty.id -> date.asJsonLD
)
}
}
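
For context, a minimal sketch (plain Scala, simplified string types, values assumed for illustration) of the entity-id scheme the new viewedDatasetEncoder uses: the per-user viewed-dataset node gets an id derived from the person's resource id and the dataset identifier, so the same (user, dataset) pair always maps to the same entity id.

// Mirrors EntityId.of(s"$userId/datasets/$identifier") in the encoder above;
// the real code works with persons.ResourceId and datasets.Identifier tiny types.
final case class PersonViewedDatasetId(userId: String, datasetIdentifier: String) {
  lazy val asEntityId: String = s"$userId/datasets/$datasetIdentifier"
}

val first  = PersonViewedDatasetId("https://renku/persons/123", "abcd1234")
val second = PersonViewedDatasetId("https://renku/persons/123", "abcd1234")

// Deterministic: repeated views of the same dataset by the same person
// resolve to the same entity id rather than minting a new node each time.
assert(first.asEntityId == second.asEntityId)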