chore: prepare 2.48.0 (#1813)
jachro authored Dec 5, 2023
2 parents 7994fd5 + d125112 commit 3a7bd7c
Showing 222 changed files with 3,527 additions and 2,209 deletions.
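The bulk of the visible changes implement one cross-cutting rename in the dataset model and APIs: the human-readable field previously exposed as `title` becomes `name`, and the URL-friendly identifier previously exposed as `name` becomes `slug` (the value types move from `datasets.Title`/`datasets.Name` to `datasets.Name`/`datasets.Slug`). A minimal, hypothetical sketch of the shape before and after — the real type behind `dataset.identification` uses those value types rather than plain `String`s:

```scala
// Hypothetical before/after sketch of the rename; field names only.
object Before {
  final case class Identification(title: String, name: String) // "name" doubled as the URL segment
}
object After {
  final case class Identification(name: String, slug: String)  // "slug" is now the URL segment
}
```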
@@ -102,8 +102,8 @@ object PostgresDB {
max = dbConfig.connectionPool.value
)

def sessionPoolResource[A](dbCfg: DBConfig[_]): Resource[IO, SessionResource[IO, A]] =
sessionPool(dbCfg).map(SessionResource[IO, A](_))
def sessionPoolResource[A](dbCfg: DBConfig[A]): Resource[IO, SessionResource[IO, A]] =
sessionPool(dbCfg).map(SessionResource[IO, A](_, dbCfg))

def initializeDatabase(cfg: DBConfig[_]): IO[Unit] = {
val session = Session.single[IO](
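The `sessionPoolResource` change above tightens `DBConfig[_]` to `DBConfig[A]` and passes the config into the `SessionResource` it builds, so the resource now carries the configuration of the database it was created for. A hedged usage sketch — `MyDB` and the body of the `use` block are assumptions; only `PostgresDB.sessionPoolResource` comes from the diff:

```scala
import cats.effect.IO

trait MyDB // hypothetical phantom type identifying the target database

def withSessions(dbCfg: DBConfig[MyDB]): IO[Unit] =
  PostgresDB.sessionPoolResource[MyDB](dbCfg).use { sessionResource =>
    // sessionResource: SessionResource[IO, MyDB], now constructed with dbCfg as well
    IO.unit
  }
```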
@@ -64,7 +64,7 @@ class CrossEntitiesSearchSpec extends AcceptanceSpec with ApplicationServices wi
)
.withDatasets(
datasetEntities(provenanceInternal(cliShapedPersons))
.modify(replaceDSName(sentenceContaining(commonPhrase).generateAs[datasets.Name]))
.modify(replaceDSSlug(sentenceContaining(commonPhrase).generateAs[datasets.Slug]))
)
.generateOne
val project = dataProjects(testProject).map(addMemberWithId(user.id, Role.Owner)).generateOne
@@ -57,17 +57,16 @@ trait DatasetsApiEncoders extends ImageApiEncoders {
"versions": {
"initial": ${dataset.provenance.originalIdentifier.value}
},
"title": ${dataset.identification.title.value},
"name": ${dataset.identification.name.value},
"slug": ${dataset.identification.name.value},
"slug": ${dataset.identification.slug.value},
"images": ${dataset.additionalInfo.images -> projectSlug}
}"""
.deepMerge(
_links(
Rel("details") -> Href(renkuApiUrl / "datasets" / dataset.identification.identifier),
Rel("initial-version") -> Href(renkuApiUrl / "datasets" / dataset.provenance.originalIdentifier),
Rel("tags") -> Href(
renkuApiUrl / "projects" / projectSlug / "datasets" / dataset.identification.name / "tags"
renkuApiUrl / "projects" / projectSlug / "datasets" / dataset.identification.slug / "tags"
)
)
)
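With the rename, the encoder above emits both keys side by side: `name` carries what used to be published as `title`, while `slug` comes from the dedicated `identification.slug` field instead of reusing the name, and the `tags` link is built from the slug. A hedged sketch of the resulting payload for a made-up dataset (identifiers, URLs, and the exact `_links` encoding are illustrative assumptions):

```scala
import io.circe.literal._

val exampleDatasetJson = json"""{
  "versions": { "initial": "0001" },
  "name":     "Flight delays 2023",
  "slug":     "flight-delays-2023",
  "images":   [],
  "_links": [
    { "rel": "details",         "href": "https://renku.example/api/datasets/abc123" },
    { "rel": "initial-version", "href": "https://renku.example/api/datasets/0001" },
    { "rel": "tags",            "href": "https://renku.example/api/projects/group/project/datasets/flight-delays-2023/tags" }
  ]
}"""
```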
@@ -110,16 +109,15 @@ trait DatasetsApiEncoders extends ImageApiEncoders {
actualResults: List[Json]
): Json = {
val actualIdentifier = actualResults
.findId(dataset.identification.title)
.getOrElse(fail(s"No ${dataset.identification.title} dataset found among the results"))
.findId(dataset.identification.name)
.getOrElse(fail(s"No ${dataset.identification.name} dataset found among the results"))

dataset.identification.identifier shouldBe actualIdentifier

json"""{
"identifier": ${actualIdentifier.value},
"title": ${dataset.identification.title.value},
"name": ${dataset.identification.name.value},
"slug": ${dataset.identification.name.value},
"slug": ${dataset.identification.slug.value},
"published": ${dataset.provenance.creators -> dataset.provenance.date},
"date": ${dataset.provenance.date.instant},
"projectsCount": $projectsCount,
@@ -153,9 +151,9 @@ trait DatasetsApiEncoders extends ImageApiEncoders {

implicit class JsonsOps(jsons: List[Json]) {

def findId(title: datasets.Title): Option[datasets.Identifier] =
def findId(name: datasets.Name): Option[datasets.Identifier] =
jsons
.find(_.hcursor.downField("title").as[String].fold(throw _, _ == title.toString))
.find(_.hcursor.downField("name").as[String].fold(throw _, _ == name.toString))
.map(_.hcursor.downField("identifier").as[datasets.Identifier].fold(throw _, identity))
}

@@ -219,7 +219,7 @@ class DatasetsResourcesSpec

val (dataset1, testProject1) = renkuProjectEntities(visibilityPublic, creatorGen = cliShapedPersons)
.modify(removeMembers())
.addDataset(datasetEntities(provenanceInternal(cliShapedPersons)).modify(_.makeTitleContaining(text)))
.addDataset(datasetEntities(provenanceInternal(cliShapedPersons)).modify(_.makeNameContaining(text)))
.generateOne
val project1 = dataProjects(testProject1).map(addMemberWithId(creator.id, Role.Owner)).generateOne

@@ -251,15 +251,15 @@ class DatasetsResourcesSpec
val project5 = dataProjects(testProject5).map(addMemberWithId(creator.id, Role.Owner)).generateOne
val (_, testProject6Private) = renkuProjectEntities(visibilityPrivate, creatorGen = cliShapedPersons)
.modify(removeMembers())
.addDataset(datasetEntities(provenanceInternal(cliShapedPersons)).modify(_.makeTitleContaining(text)))
.addDataset(datasetEntities(provenanceInternal(cliShapedPersons)).modify(_.makeNameContaining(text)))
.generateOne
val project6CreatorPerson = cliShapedPersons.generateOne
val project6Private = dataProjects(testProject6Private)
.map(replaceCreatorFrom(project6CreatorPerson, creator.id))
.map(addMemberFrom(project6CreatorPerson, creator.id, Role.Owner))
.generateOne

Given("some datasets with title, description, name and author containing some arbitrary chosen text")
Given("some datasets with name, description, slug and author containing some arbitrary chosen text")

pushToStore(project1, creator)
pushToStore(project2, creator)
@@ -290,11 +290,11 @@ class DatasetsResourcesSpec
)
}

When("user calls the GET knowledge-graph/datasets?query=<text>&sort=title:asc")
When("user calls the GET knowledge-graph/datasets?query=<text>&sort=name:asc")
val searchSortedByName =
knowledgeGraphClient GET s"knowledge-graph/datasets?query=${urlEncode(text.value)}&sort=title:asc"
knowledgeGraphClient GET s"knowledge-graph/datasets?query=${urlEncode(text.value)}&sort=name:asc"

Then("he should get OK response with some matching datasets sorted by title ASC")
Then("he should get OK response with some matching datasets sorted by name ASC")
searchSortedByName.status shouldBe Ok

val foundDatasetsSortedByName = searchSortedByName.jsonBody.as[List[Json]].value
@@ -303,21 +303,21 @@
searchResultJson(dataset2, 1, project2.slug, foundDatasetsSortedByName),
searchResultJson(dataset3, 1, project3.slug, foundDatasetsSortedByName),
searchResultJson(dataset4, 2, project4.slug, foundDatasetsSortedByName)
).sortBy(_.hcursor.downField("title").as[String].getOrElse(fail("No 'title' property found")))
).sortBy(_.hcursor.downField("name").as[String].getOrElse(fail("No 'name' property found")))
val datasetsSortedByNameProj4ForkSlug = List(
searchResultJson(dataset1, 1, project1.slug, foundDatasetsSortedByName),
searchResultJson(dataset2, 1, project2.slug, foundDatasetsSortedByName),
searchResultJson(dataset3, 1, project3.slug, foundDatasetsSortedByName),
searchResultJson(dataset4, 2, project4Fork.slug, foundDatasetsSortedByName)
).sortBy(_.hcursor.downField("title").as[String].getOrElse(fail("No 'title' property found")))
).sortBy(_.hcursor.downField("name").as[String].getOrElse(fail("No 'name' property found")))

foundDatasetsSortedByName should {
be(datasetsSortedByNameProj4Slug) or be(datasetsSortedByNameProj4ForkSlug)
}
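The search spec now sorts on `name` rather than `title`, matching the renamed JSON field. A hedged sketch of such a call, reusing the spec's own `knowledgeGraphClient` and `urlEncode` helpers with a made-up phrase:

```scala
// query datasets matching a phrase, sorted by the (renamed) name field, ascending
val phrase         = "flight delays"
val sortedResponse = knowledgeGraphClient GET s"knowledge-graph/datasets?query=${urlEncode(phrase)}&sort=name:asc"
sortedResponse.status shouldBe Ok
```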

When("user calls the GET knowledge-graph/datasets?query=<text>&sort=title:asc&page=2&per_page=1")
When("user calls the GET knowledge-graph/datasets?query=<text>&sort=name:asc&page=2&per_page=1")
val searchForPage =
knowledgeGraphClient GET s"knowledge-graph/datasets?query=${urlEncode(text.value)}&sort=title:asc&page=2&per_page=1"
knowledgeGraphClient GET s"knowledge-graph/datasets?query=${urlEncode(text.value)}&sort=name:asc&page=2&per_page=1"

Then("he should get OK response with the dataset from the requested page")
val foundDatasetsPage = searchForPage.jsonBody.as[List[Json]].value
@@ -327,7 +327,7 @@
}

When("user calls the GET knowledge-graph/datasets?sort=name:asc")
val searchWithoutPhrase = knowledgeGraphClient GET s"knowledge-graph/datasets?sort=title:asc"
val searchWithoutPhrase = knowledgeGraphClient GET s"knowledge-graph/datasets?sort=name:asc"

Then("he should get OK response with all the datasets")
val foundDatasetsWithoutPhrase = searchWithoutPhrase.jsonBody.as[List[Json]].value
@@ -338,14 +338,14 @@
searchResultJson(dataset3, 1, project3.slug, foundDatasetsWithoutPhrase),
searchResultJson(dataset4, 2, project4.slug, foundDatasetsWithoutPhrase),
searchResultJson(dataset5WithoutText, 1, project5.slug, foundDatasetsWithoutPhrase)
).sortBy(_.hcursor.downField("title").as[String].getOrElse(fail("No 'title' property found"))) or
).sortBy(_.hcursor.downField("name").as[String].getOrElse(fail("No 'name' property found"))) or
contain allElementsOf List(
searchResultJson(dataset1, 1, project1.slug, foundDatasetsWithoutPhrase),
searchResultJson(dataset2, 1, project2.slug, foundDatasetsWithoutPhrase),
searchResultJson(dataset3, 1, project3.slug, foundDatasetsWithoutPhrase),
searchResultJson(dataset4, 2, project4Fork.slug, foundDatasetsWithoutPhrase),
searchResultJson(dataset5WithoutText, 1, project5.slug, foundDatasetsWithoutPhrase)
).sortBy(_.hcursor.downField("title").as[String].getOrElse(fail("No 'title' property found")))
).sortBy(_.hcursor.downField("name").as[String].getOrElse(fail("No 'name' property found")))
}

When("user uses the response header link with the rel='first'")
@@ -384,7 +384,7 @@ class DatasetsResourcesSpec

val (dataset1, testProject1) = renkuProjectEntities(visibilityPublic, creatorGen = cliShapedPersons)
.modify(removeMembers())
.addDataset(datasetEntities(provenanceInternal(cliShapedPersons)).modify(_.makeTitleContaining(text)))
.addDataset(datasetEntities(provenanceInternal(cliShapedPersons)).modify(_.makeNameContaining(text)))
.generateOne
val project1CreatorPerson = cliShapedPersons.generateOne
val project1 = dataProjects(testProject1)
@@ -394,7 +394,7 @@

val (_, testProject2Private) = renkuProjectEntities(visibilityPrivate, creatorGen = cliShapedPersons)
.modify(removeMembers())
.addDataset(datasetEntities(provenanceInternal(cliShapedPersons)).modify(_.makeTitleContaining(text)))
.addDataset(datasetEntities(provenanceInternal(cliShapedPersons)).modify(_.makeNameContaining(text)))
.generateOne
val project2CreatorPerson = cliShapedPersons.generateOne
val project2Private = dataProjects(testProject2Private)
@@ -405,22 +405,22 @@
val (dataset3PrivateWithAccess, testProject3PrivateWithAccess) =
renkuProjectEntities(visibilityPrivate, creatorGen = cliShapedPersons)
.modify(removeMembers())
.addDataset(datasetEntities(provenanceInternal(cliShapedPersons)).modify(_.makeTitleContaining(text)))
.addDataset(datasetEntities(provenanceInternal(cliShapedPersons)).modify(_.makeNameContaining(text)))
.generateOne
val project3CreatorPerson = cliShapedPersons.generateOne
val project3PrivateWithAccess = dataProjects(testProject3PrivateWithAccess)
.map(replaceCreatorFrom(project3CreatorPerson, creator.id))
.map(addMemberWithId(user.id, Role.Maintainer) >>> addMemberFrom(project3CreatorPerson, creator.id, Role.Owner))
.generateOne

Given("some datasets with title, description, name and author containing some arbitrary chosen text")
Given("some datasets with slug, description, name and author containing some arbitrary chosen text")
pushToStore(project1, creator)
pushToStore(project2Private, creator)
pushToStore(project3PrivateWithAccess, creator)

When("user calls the GET knowledge-graph/datasets?query=<text>")
val datasetsSearchResponse =
knowledgeGraphClient GET (s"knowledge-graph/datasets?query=${urlEncode(text.value)}&sort=title:asc", user.accessToken)
knowledgeGraphClient GET (s"knowledge-graph/datasets?query=${urlEncode(text.value)}&sort=name:asc", user.accessToken)

Then("he should get OK response with some matching datasets")
datasetsSearchResponse.status shouldBe Ok
@@ -71,14 +71,13 @@ class ProjectDatasetTagsResourceSpec
}

val commitId = commitIds.generateOne
// mockDataOnGitLabAPIs(project, project.entitiesProject.asJsonLD, commitId)
mockCommitDataOnTripleGenerator(project, toPayloadJsonLD(project), commitId)
gitLabStub.setupProject(project, commitId)
`data in the Triples Store`(project, commitId, accessToken)

When("the user fetches the tags with GET knowledge-graph/projects/:namespace/:name/datasets/:dsName/tags")
val response = knowledgeGraphClient.GET(
s"knowledge-graph/projects/${project.slug}/datasets/${dataset.identification.name}/tags",
s"knowledge-graph/projects/${project.slug}/datasets/${dataset.identification.slug}/tags",
accessToken
)
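After the rename, a project dataset's tags are addressed by the dataset `slug` (the URL segment formerly exposed as `name`). A hedged sketch of the request shape with placeholder path values:

```scala
// GET knowledge-graph/projects/<namespace>/<project>/datasets/<dsSlug>/tags
val tagsResponse = knowledgeGraphClient.GET(
  "knowledge-graph/projects/group/my-project/datasets/flight-delays-2023/tags",
  accessToken
)
```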

2 changes: 1 addition & 1 deletion build.sbt
@@ -235,7 +235,7 @@ lazy val tokenRepository = project
.in(file("token-repository"))
.withId("token-repository")
.settings(commonSettings)
.dependsOn(graphCommons % "compile->compile; test->test")
.dependsOn(eventLogApi % "compile->compile; test->test")
.enablePlugins(
JavaAppPackaging,
AutomateHeaderPlugin
@@ -34,6 +34,7 @@ object DatasetsQuery extends EntityQuery[Entity.Dataset] {
override val entityType: Filters.EntityType = Filters.EntityType.Dataset

private val matchingScoreVar = VarName("matchingScore")
private val slugVar = VarName("slug")
private val nameVar = VarName("name")
private val idsSlugsVisibilitiesVar = VarName("idsSlugsVisibilities")
private val sameAsVar = VarName("sameAs")
@@ -52,6 +53,7 @@ object DatasetsQuery extends EntityQuery[Entity.Dataset] {
override val selectVariables: Set[String] = Set(
entityTypeVar,
matchingScoreVar,
slugVar,
nameVar,
idsSlugsVisibilitiesVar,
sameAsVar,
@@ -70,6 +72,7 @@ object DatasetsQuery extends EntityQuery[Entity.Dataset] {
fr"""{
|SELECT DISTINCT $entityTypeVar
| $matchingScoreVar
| $slugVar
| $nameVar
| $idsSlugsVisibilitiesVar
| $sameAsVar
@@ -120,7 +123,8 @@ object DatasetsQuery extends EntityQuery[Entity.Dataset] {
|
| GRAPH schema:Dataset {
| # name
| $sameAsVar renku:slug $nameVar
| $sameAsVar renku:slug $slugVar;
| schema:name $nameVar.
|
| #description
| $description
@@ -232,7 +236,7 @@ object DatasetsQuery extends EntityQuery[Entity.Dataset] {
| SELECT $sameAsVar (MAX(?score) AS $matchingScoreVar)
| WHERE {
| Graph schema:Dataset {
| (?id ?score) text:query (renku:slug renku:keywordsConcat schema:description schema:name $luceneQuery).
| (?id ?score) text:query (schema:name renku:slug renku:keywordsConcat schema:description $luceneQuery).
| {
| $sameAsVar a renku:DiscoverableDataset;
| schema:creator ?id
@@ -280,6 +284,7 @@ object DatasetsQuery extends EntityQuery[Entity.Dataset] {

for {
matchingScore <- read[MatchingScore](matchingScoreVar)
slug <- read[datasets.Slug](slugVar)
name <- read[datasets.Name](nameVar)
sameAs <- read[datasets.TopmostSameAs](sameAsVar)
slugAndVisibility <- read[Option[String]](idsSlugsVisibilitiesVar)
@@ -298,6 +303,7 @@ object DatasetsQuery extends EntityQuery[Entity.Dataset] {
} yield Entity.Dataset(
matchingScore,
sameAs,
slug,
name,
slugAndVisibility._2,
date,
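In the query above, the dataset graph now binds two values per dataset: `renku:slug` into `?slug` and `schema:name` into `?name`, and the Lucene `text:query` property list gains `schema:name`, so free-text search also matches the human-readable name. The decoder then reads both and passes them to the extended `Entity.Dataset` shown in the next hunk. A hedged, hand-written approximation of the new triple pattern (not the exact generated text):

```scala
val slugAndNamePattern: String =
  """GRAPH schema:Dataset {
    |  ?sameAs renku:slug  ?slug ;
    |          schema:name ?name .
    |}""".stripMargin
```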
@@ -61,6 +61,7 @@ object model {
final case class Dataset(
matchingScore: MatchingScore,
sameAs: datasets.TopmostSameAs,
slug: datasets.Slug,
name: datasets.Name,
visibility: projects.Visibility,
date: datasets.CreatedOrPublished,
@@ -26,6 +26,7 @@ import io.renku.graph.model.{datasets, projects}

private final case class DatasetSearchInfo(topmostSameAs: datasets.TopmostSameAs,
name: datasets.Name,
slug: datasets.Slug,
createdOrPublished: datasets.CreatedOrPublished,
maybeDateModified: Option[datasets.DateModified],
creators: NonEmptyList[Creator],
@@ -42,6 +43,7 @@ private object DatasetSearchInfo {
implicit val show: Show[DatasetSearchInfo] = Show.show {
case info @ DatasetSearchInfo(topSameAs,
name,
slug,
createdOrPublished,
maybeDateModified,
creators,
@@ -53,6 +55,7 @@ private object DatasetSearchInfo {
List(
show"topmostSameAs = $topSameAs".some,
show"name = $name".some,
show"slug = $slug".some,
show"visibility = ${info.visibility}".some,
createdOrPublished match {
case d: datasets.DateCreated => show"dateCreated = $d".some
@@ -26,6 +26,7 @@ import io.renku.graph.model.{datasets, projects}

private final case class ModelDatasetSearchInfo(topmostSameAs: datasets.TopmostSameAs,
name: datasets.Name,
slug: datasets.Slug,
createdOrPublished: datasets.CreatedOrPublished,
maybeDateModified: Option[datasets.DateModified],
creators: NonEmptyList[Creator],
@@ -42,6 +43,7 @@ private object ModelDatasetSearchInfo {
implicit val show: Show[ModelDatasetSearchInfo] = Show.show {
case info @ ModelDatasetSearchInfo(topSameAs,
name,
slug,
createdOrPublished,
maybeDateModified,
creators,
@@ -53,6 +55,7 @@ private object ModelDatasetSearchInfo {
List(
show"topmostSameAs = $topSameAs".some,
show"name = $name".some,
show"slug = $slug".some,
show"visibility = ${info.visibility}".some,
createdOrPublished match {
case d: datasets.DateCreated => show"dateCreated = $d".some
@@ -60,6 +60,7 @@ private object ModelSearchInfoExtractor {
) = ModelDatasetSearchInfo(
ds.provenance.topmostSameAs,
ds.identification.name,
ds.identification.slug,
createdOrPublished,
maybeDateModified,
ds.provenance.creators.map(Creator.from),
@@ -103,6 +103,7 @@ private object CalculatorInfoSet {
lazy val toDatasetSearchInfo: DatasetSearchInfo = DatasetSearchInfo(
modelInfo.topmostSameAs,
modelInfo.name,
modelInfo.slug,
modelInfo.createdOrPublished,
modelInfo.maybeDateModified,
modelInfo.creators,
@@ -125,6 +126,7 @@ private object CalculatorInfoSet {
private def toString(info: ModelDatasetSearchInfo) = List(
show"topmostSameAs = ${info.topmostSameAs}",
show"name = ${info.name}",
show"slug = ${info.slug}",
show"visibility = ${info.link.visibility}",
show"link = ${info.link}"
).mkString(", ")