diff --git a/_Design/Entity Relationship Diagrams.png b/_Design/Entity Relationship Diagrams.png index b58343e0..ffc1a1a3 100644 Binary files a/_Design/Entity Relationship Diagrams.png and b/_Design/Entity Relationship Diagrams.png differ diff --git a/_Design/Entity Relationship Diagrams.wsd b/_Design/Entity Relationship Diagrams.wsd index 6ef13dee..7b3ff6d6 100644 --- a/_Design/Entity Relationship Diagrams.wsd +++ b/_Design/Entity Relationship Diagrams.wsd @@ -177,12 +177,13 @@ entity "user_groups" { * id : int <> -- * parent_id : int <> - name: nvarchar(255) - type: nvarchar(255) - order: number + * type: nvarchar(255) + * name: nvarchar(255) + * order: number + * acronym: nvarchar(10) last_division_directory_sync_at: datetime2(0) - created_at : datetime2(0) - updated_at : datetime2(0) + * created_at : datetime2(0) + * updated_at : datetime2(0) deleted_at : datetime2(0) } diff --git a/api/src/db/migrations/2024.06.10T17.08.32.add-acronym-to-user-groups.ts b/api/src/db/migrations/2024.06.10T17.08.32.add-acronym-to-user-groups.ts new file mode 100644 index 00000000..375fe8f1 --- /dev/null +++ b/api/src/db/migrations/2024.06.10T17.08.32.add-acronym-to-user-groups.ts @@ -0,0 +1,14 @@ +import { DataTypes } from "sequelize" + +import type { Migration } from "@/db/umzug" + +export const up: Migration = async ({ context: queryInterface }) => { + await queryInterface.addColumn("user_groups", "acronym", { + type: DataTypes.STRING(10), + allowNull: true, + }) +} + +export const down: Migration = async ({ context: queryInterface }) => { + await queryInterface.removeColumn("user_groups", "acronym") +} diff --git a/api/src/db/migrations/2024.06.10T18.01.34.backfill-user-groups-acronyms.ts b/api/src/db/migrations/2024.06.10T18.01.34.backfill-user-groups-acronyms.ts new file mode 100644 index 00000000..db0cc223 --- /dev/null +++ b/api/src/db/migrations/2024.06.10T18.01.34.backfill-user-groups-acronyms.ts @@ -0,0 +1,26 @@ +import { DataTypes } from "sequelize" + +import type { 
Migration } from "@/db/umzug" +import { UserGroup } from "@/models" +import acronymize from "@/utils/acronymize" + +export const up: Migration = async ({ context: queryInterface }) => { + await UserGroup.findEach(async (userGroup) => { + const acronym = acronymize(userGroup.name) + await userGroup.update({ + acronym, + }) + }) + + await queryInterface.changeColumn("user_groups", "acronym", { + type: DataTypes.STRING(10), + allowNull: false, + }) +} + +export const down: Migration = async ({ context: queryInterface }) => { + await queryInterface.changeColumn("user_groups", "acronym", { + type: DataTypes.STRING(10), + allowNull: true, + }) +} diff --git a/api/src/models/dataset.ts b/api/src/models/dataset.ts index ed75ac5d..ae5e1897 100644 --- a/api/src/models/dataset.ts +++ b/api/src/models/dataset.ts @@ -1,4 +1,4 @@ -import { isNil } from "lodash" +import { isEmpty, isNil } from "lodash" import { Association, BelongsToCreateAssociationMixin, @@ -41,6 +41,7 @@ import { datasetHasApprovedAccessRequestFor, datasetIsAccessibleViaOpenAccessGrantBy, datasetsAccessibleViaAccessGrantsBy, + datasetsSearch, mostPermissiveAccessGrantFor, } from "@/models/datasets" import VisualizationControl from "@/models/visualization-control" @@ -353,6 +354,23 @@ Dataset.init( }, ], scopes: { + search(searchToken: string) { + if (isEmpty(searchToken)) { + return {} + } + + return { + where: { + id: { + [Op.in]: datasetsSearch(), + }, + }, + replacements: { + searchTokenWildcard: `%${searchToken}%`, + searchToken, + }, + } + }, accessibleViaAccessGrantsBy(user: User) { return { where: { diff --git a/api/src/models/datasets/datasets-search.ts b/api/src/models/datasets/datasets-search.ts new file mode 100644 index 00000000..fdd6789f --- /dev/null +++ b/api/src/models/datasets/datasets-search.ts @@ -0,0 +1,78 @@ +import { literal } from "sequelize" +import { Literal } from "sequelize/types/utils" + +import { compactSql } from "@/utils/compact-sql" + +/** + * Requires replacements to be passed 
in to query. + * e.g. { replacements: { searchTokenWildcard: `%${searchToken}%`, searchToken } + */ +export function datasetsSearch(): Literal { + const matchingEntries = compactSql(/*sql*/ ` + ( + SELECT + DISTINCT datasets.id + FROM + datasets + WHERE + datasets.deleted_at IS NULL + AND ( + LOWER(datasets.name) LIKE LOWER(:searchTokenWildcard) + OR LOWER(datasets.description) LIKE LOWER(:searchTokenWildcard) + OR EXISTS ( + SELECT + 1 + FROM + taggings + INNER JOIN tags ON taggings.tag_id = tags.id + AND datasets.id = taggings.taggable_id + AND taggings.taggable_type = 'Dataset' + AND tags.deleted_at IS NULL + AND taggings.deleted_at IS NULL + AND LOWER(tags.name) LIKE LOWER(:searchTokenWildcard) + ) + OR EXISTS ( + SELECT + 1 + FROM + user_groups + INNER JOIN dataset_stewardships ON ( + ( + dataset_stewardships.department_id = user_groups.id + AND user_groups.type = 'department' + ) + OR ( + dataset_stewardships.department_id IS NOT NULL + AND dataset_stewardships.division_id = user_groups.id + AND user_groups.type = 'division' + ) + OR ( + dataset_stewardships.department_id IS NOT NULL + AND dataset_stewardships.division_id IS NOT NULL + AND dataset_stewardships.branch_id = user_groups.id + AND user_groups.type = 'branch' + ) + OR ( + dataset_stewardships.department_id IS NOT NULL + AND dataset_stewardships.division_id IS NOT NULL + AND dataset_stewardships.branch_id IS NOT NULL + AND dataset_stewardships.unit_id = user_groups.id + AND user_groups.type = 'unit' + ) + ) + AND datasets.id = dataset_stewardships.dataset_id + AND dataset_stewardships.deleted_at IS NULL + AND user_groups.deleted_at IS NULL + AND ( + LOWER(user_groups.name) LIKE LOWER(:searchTokenWildcard) + OR user_groups.acronym = :searchToken + ) + ) + ) + ) + `) + + return literal(matchingEntries) +} + +export default datasetsSearch diff --git a/api/src/models/datasets/index.ts b/api/src/models/datasets/index.ts index 0b65cb23..99f4f7ee 100644 --- a/api/src/models/datasets/index.ts +++ 
b/api/src/models/datasets/index.ts @@ -2,6 +2,7 @@ export { datasetHasApprovedAccessRequestFor } from "./dataset-has-approved-acces export { datasetIsAccessibleViaOpenAccessGrantBy } from "./dataset-is-accessible-via-open-access-grant-by" export { datasetsAccessibleViaAccessGrantsBy } from "./datasets-accessible-via-access-grants-by" export { datasetsAccessibleViaOwner } from "./datasets-accessible-via-owner" +export { datasetsSearch } from "./datasets-search" export { datasetsWithApprovedAccessRequestsFor } from "./datasets-with-approved-access-requests-for" export { datasetsWithFieldExclusionsDisabled } from "./datasets-with-field-exclusions-disabled" export { datasetsWithPreviewDisabled } from "./datasets-with-preview-disabled" diff --git a/api/src/models/tagging.ts b/api/src/models/tagging.ts index 42406ffc..8b59f61a 100644 --- a/api/src/models/tagging.ts +++ b/api/src/models/tagging.ts @@ -27,6 +27,8 @@ export enum TaggableTypes { } export class Tagging extends Model, InferCreationAttributes> { + static readonly TaggableTypes = TaggableTypes + declare id: CreationOptional declare tagId: ForeignKey declare taggableId: number diff --git a/api/src/models/user-groups.ts b/api/src/models/user-groups.ts index c95e9c49..6c4ee9fe 100644 --- a/api/src/models/user-groups.ts +++ b/api/src/models/user-groups.ts @@ -43,8 +43,9 @@ export class UserGroup extends BaseModel< declare id: CreationOptional declare parentId: ForeignKey | null - declare name: string declare type: string + declare name: string + declare acronym: string declare order: number declare lastDivisionDirectorySyncAt: Date | null declare createdAt: CreationOptional @@ -275,6 +276,10 @@ UserGroup.init( type: DataTypes.STRING(255), allowNull: false, }, + acronym: { + type: DataTypes.STRING(10), + allowNull: false, + }, order: { type: DataTypes.INTEGER, allowNull: false, diff --git a/api/src/services/user-groups/sync-service.ts b/api/src/services/user-groups/sync-service.ts index 3ccd821f..eab05705 100644 --- 
a/api/src/services/user-groups/sync-service.ts +++ b/api/src/services/user-groups/sync-service.ts @@ -1,8 +1,10 @@ -import { UserGroup } from "@/models" +import { isEmpty } from "lodash" -import BaseService from "@/services/base-service" +import acronymize from "@/utils/acronymize" import { yukonGovernmentIntegration } from "@/integrations" +import { UserGroup } from "@/models" import { UserGroupTypes } from "@/models/user-groups" +import BaseService from "@/services/base-service" export const DEFAULT_ORDER = -1 @@ -16,13 +18,18 @@ export class SyncService extends BaseService { const isDivision = branch === null && unit === null const isBranch = unit === null + const departmentName = this.cleanName(department) + if (isEmpty(departmentName)) continue + + const departmentAcronym = acronymize(departmentName) const [userGroup1] = await UserGroup.findOrCreate({ where: { - name: this.cleanName(department), + name: departmentName, type: UserGroupTypes.DEPARTMENT, }, defaults: { - name: this.cleanName(department), + name: departmentName, + acronym: departmentAcronym, type: UserGroupTypes.DEPARTMENT, order: DEFAULT_ORDER, }, @@ -33,15 +40,20 @@ export class SyncService extends BaseService { } if (division !== null) { + const divisionName = this.cleanName(division) + if (isEmpty(divisionName)) continue + + const divisionAcronym = acronymize(divisionName) const [userGroup2] = await UserGroup.findOrCreate({ where: { parentId: userGroup1.id, - name: this.cleanName(division), + name: divisionName, type: UserGroupTypes.DIVISION, }, defaults: { parentId: userGroup1.id, - name: this.cleanName(division), + name: divisionName, + acronym: divisionAcronym, type: UserGroupTypes.DIVISION, order: DEFAULT_ORDER, }, @@ -52,15 +64,20 @@ export class SyncService extends BaseService { } if (branch !== null) { + const branchName = this.cleanName(branch) + if (isEmpty(branchName)) continue + + const branchAcronym = acronymize(branchName) const [userGroup3] = await UserGroup.findOrCreate({ where: 
{ parentId: userGroup2.id, - name: this.cleanName(branch), + name: branchName, type: UserGroupTypes.BRANCH, }, defaults: { parentId: userGroup2.id, - name: this.cleanName(branch), + name: branchName, + acronym: branchAcronym, type: UserGroupTypes.BRANCH, order: DEFAULT_ORDER, }, @@ -71,15 +88,20 @@ export class SyncService extends BaseService { } if (unit !== null) { + const unitName = this.cleanName(unit) + if (isEmpty(unitName)) continue + + const unitAcronym = acronymize(unitName) await UserGroup.findOrCreate({ where: { parentId: userGroup3.id, - name: this.cleanName(unit), + name: unitName, type: UserGroupTypes.UNIT, }, defaults: { parentId: userGroup3.id, - name: this.cleanName(unit), + name: unitName, + acronym: unitAcronym, type: UserGroupTypes.UNIT, order, }, diff --git a/web/src/utils/acronymize.ts b/api/src/utils/acronymize.ts similarity index 100% rename from web/src/utils/acronymize.ts rename to api/src/utils/acronymize.ts diff --git a/api/tests/factories/dataset-integration-factory.ts b/api/tests/factories/dataset-integration-factory.ts index a196a111..47c0e986 100644 --- a/api/tests/factories/dataset-integration-factory.ts +++ b/api/tests/factories/dataset-integration-factory.ts @@ -16,7 +16,7 @@ function assertParamsHasDatasetId( export const datasetIntegrationFactory = BaseFactory.define( ({ sequence, params, onCreate }) => { - onCreate((datasetField) => datasetField.save()) + onCreate((datasetIntegration) => datasetIntegration.save()) assertParamsHasDatasetId(params) diff --git a/api/tests/factories/dataset-stewardship-factory.ts b/api/tests/factories/dataset-stewardship-factory.ts new file mode 100644 index 00000000..e95c8bea --- /dev/null +++ b/api/tests/factories/dataset-stewardship-factory.ts @@ -0,0 +1,57 @@ +import { DeepPartial } from "fishery" + +import { DatasetStewardship } from "@/models" +import BaseFactory from "@/factories/base-factory" + +export const datasetStewardshipFactory = BaseFactory.define( + ({ sequence, params, onCreate }) 
=> { + onCreate((datasetStewardship) => datasetStewardship.save()) + + assertParamsHasDatasetId(params) + assertParamsHasOwnerId(params) + assertParamsHasSupportId(params) + assertParamsHasDepartmentId(params) + + return DatasetStewardship.build({ + id: sequence, + datasetId: params.datasetId, // does not unbrand and cast datasetId to number + ownerId: params.ownerId, + supportId: params.supportId, + departmentId: params.departmentId, + }) + } +) + +export default datasetStewardshipFactory + +function assertParamsHasDatasetId( + params: DeepPartial +): asserts params is DeepPartial & { datasetId: number } { + if (typeof params.datasetId !== "number") { + throw new Error("datasetId must be a number") + } +} + +function assertParamsHasOwnerId( + params: DeepPartial +): asserts params is DeepPartial & { ownerId: number } { + if (typeof params.ownerId !== "number") { + throw new Error("ownerId must be a number") + } +} + +function assertParamsHasSupportId( + params: DeepPartial +): asserts params is DeepPartial & { supportId: number } { + if (typeof params.supportId !== "number") { + throw new Error("supportId must be a number") + } +} + +function assertParamsHasDepartmentId( + params: DeepPartial +): asserts params is DeepPartial & { departmentId: number } { + if (typeof params.departmentId !== "number") { + throw new Error("departmentId must be a number") + } +} diff --git a/api/tests/factories/helpers.ts b/api/tests/factories/helpers.ts index c6bffe4b..02596a8d 100644 --- a/api/tests/factories/helpers.ts +++ b/api/tests/factories/helpers.ts @@ -1,8 +1,8 @@ import { faker } from "@faker-js/faker" export function anytime() { - const hours = faker.number.int({ min: 0, max: 23 }).toString().padStart(2, '0') - const minutes = faker.number.int({ min: 0, max: 59 }).toString().padStart(2, '0') + const hours = faker.number.int({ min: 0, max: 23 }).toString().padStart(2, "0") + const minutes = faker.number.int({ min: 0, max: 59 }).toString().padStart(2, "0") + const 
seconds = "00" // currently we aren't tracking time to the second return `${hours}:${minutes}:${seconds}` diff --git a/api/tests/factories/index.ts b/api/tests/factories/index.ts index 47bde9b6..2bc95ddc 100644 --- a/api/tests/factories/index.ts +++ b/api/tests/factories/index.ts @@ -6,7 +6,10 @@ export { datasetEntryPreviewFactory } from "./dataset-entry-preview-factory" export { datasetFactory } from "./dataset-factory" export { datasetFieldFactory } from "./dataset-field-factory" export { datasetIntegrationFactory } from "./dataset-integration-factory" +export { datasetStewardshipFactory } from "./dataset-stewardship-factory" export { roleFactory } from "./role-factory" +export { tagFactory } from "./tag-factory" +export { taggingFactory } from "./tagging-factory" export { userFactory } from "./user-factory" export { userGroupFactory } from "./user-group-factory" export { userGroupMembershipFactory } from "./user-group-membership-factory" diff --git a/api/tests/factories/tag-factory.ts b/api/tests/factories/tag-factory.ts new file mode 100644 index 00000000..7a44bb69 --- /dev/null +++ b/api/tests/factories/tag-factory.ts @@ -0,0 +1,16 @@ +import { faker } from "@faker-js/faker" + +import { Tag } from "@/models" + +import BaseFactory from "@/factories/base-factory" + +export const tagFactory = BaseFactory.define(({ sequence, onCreate }) => { + onCreate((tag) => tag.save()) + + return Tag.build({ + id: sequence, + name: faker.lorem.word(), + }) +}) + +export default tagFactory diff --git a/api/tests/factories/tagging-factory.ts b/api/tests/factories/tagging-factory.ts new file mode 100644 index 00000000..596a35b3 --- /dev/null +++ b/api/tests/factories/tagging-factory.ts @@ -0,0 +1,50 @@ +import { DeepPartial } from "fishery" + +import { Tagging } from "@/models" +import { TaggableTypes } from "@/models/tagging" +import BaseFactory from "@/factories/base-factory" + +export const taggingFactory = BaseFactory.define(({ sequence, onCreate, params }) => { + 
onCreate((tagging) => tagging.save()) + + assertParamsHasTaggableId(params) + assertParamsHasTaggableType(params) + assertParamsHasTagId(params) + + return Tagging.build({ + id: sequence, + taggableId: params.taggableId, + taggableType: params.taggableType, + tagId: params.tagId, + }) +}) + +function assertParamsHasTaggableId( + params: DeepPartial +): asserts params is DeepPartial & { taggableId: number } { + if (typeof params.taggableId !== "number") { + throw new Error("taggableId must be a number") + } +} + +function assertParamsHasTaggableType( + params: DeepPartial +): asserts params is DeepPartial & { taggableType: TaggableTypes } { + if (params.taggableType === undefined) { + throw new Error("taggableType is required") + } + + if (!Object.values(Tagging.TaggableTypes).includes(params.taggableType)) { + throw new Error("taggableType must be a TaggableTypes") + } +} + +function assertParamsHasTagId( + params: DeepPartial +): asserts params is DeepPartial & { tagId: number } { + if (typeof params.tagId !== "number") { + throw new Error("tagId must be a number") + } +} + +export default taggingFactory diff --git a/api/tests/factories/user-group-factory.ts b/api/tests/factories/user-group-factory.ts index 0541be59..17eae565 100644 --- a/api/tests/factories/user-group-factory.ts +++ b/api/tests/factories/user-group-factory.ts @@ -1,5 +1,6 @@ import { faker } from "@faker-js/faker" +import acronymize from "@/utils/acronymize" import { UserGroup } from "@/models" import { DEFAULT_ORDER, UserGroupTypes } from "@/models/user-groups" @@ -13,10 +14,15 @@ export const userGroupFactory = BaseFactory.define(({ sequence, param // TODO: make sure every type except "department" has a parent + const fakeName = faker.lorem.words({ min: 1, max: 3 }) + const name = `${fakeName} ${type} ${sequence}` + const acronym = acronymize(name) + return UserGroup.build({ id: sequence, type, - name: `${type} ${sequence}`, + name, + acronym, order: DEFAULT_ORDER, }) }) diff --git 
a/api/tests/models/datasets/datasets-search.test.ts b/api/tests/models/datasets/datasets-search.test.ts new file mode 100644 index 00000000..49b6ca85 --- /dev/null +++ b/api/tests/models/datasets/datasets-search.test.ts @@ -0,0 +1,489 @@ +import { Op } from "sequelize" + +import { Dataset } from "@/models" +import { datasetsSearch } from "@/models/datasets" +import { TaggableTypes } from "@/models/tagging" +import { + datasetFactory, + datasetStewardshipFactory, + tagFactory, + taggingFactory, + userFactory, + userGroupFactory, +} from "@/factories" +import { UserGroupTypes } from "@/models/user-groups" + +describe("api/src/models/datasets/datasets-search.ts", () => { + describe(".datasetsSearch", () => { + test("when dataset name matches search token, returns the dataset", async () => { + // Arrange + const datasetOwner = await userFactory.create() + const datasetWithMatchingName = await datasetFactory.create({ + creatorId: datasetOwner.id, + ownerId: datasetOwner.id, + name: "Yukon River Data", + }) + await datasetFactory.create({ + creatorId: datasetOwner.id, + ownerId: datasetOwner.id, + name: "Something that doesn't match", + }) + const searchToken = "River" + + // Act + const searchQuery = datasetsSearch() + const result = await Dataset.findAll({ + where: { id: { [Op.in]: searchQuery } }, + replacements: { + searchTokenWildcard: `%${searchToken}%`, + searchToken, + }, + }) + + // Assert + expect(result).toEqual([ + expect.objectContaining({ + id: datasetWithMatchingName.id, + }), + ]) + }) + + test("when dataset description matches search token, returns the dataset", async () => { + // Arrange + const datasetOwner = await userFactory.create() + const datasetWithMatchingDescription = await datasetFactory.create({ + creatorId: datasetOwner.id, + ownerId: datasetOwner.id, + description: "Data about rivers in the Yukon", + }) + await datasetFactory.create({ + creatorId: datasetOwner.id, + ownerId: datasetOwner.id, + description: "Something that doesn't match", + 
}) + const searchToken = "river" + + // Act + const searchQuery = datasetsSearch() + const result = await Dataset.findAll({ + where: { id: { [Op.in]: searchQuery } }, + replacements: { + searchTokenWildcard: `%${searchToken}%`, + searchToken, + }, + }) + + // Assert + expect(result).toEqual([ + expect.objectContaining({ + id: datasetWithMatchingDescription.id, + }), + ]) + }) + + test("when dataset tag name matches search token, returns the dataset", async () => { + // Arrange + const datasetOwner = await userFactory.create() + const datasetWithMatchingTag = await datasetFactory.create({ + creatorId: datasetOwner.id, + ownerId: datasetOwner.id, + }) + const otherDatasetWithMatchingTag = await datasetFactory.create({ + creatorId: datasetOwner.id, + ownerId: datasetOwner.id, + }) + await datasetFactory.create({ + creatorId: datasetOwner.id, + ownerId: datasetOwner.id, + }) + const matchingTag = await tagFactory.create({ name: "aaaaaaa" }) + const otherMatchingTag = await tagFactory.create({ name: "bbbaaabbb" }) + await taggingFactory.create({ + taggableId: datasetWithMatchingTag.id, + taggableType: TaggableTypes.DATASET, + tagId: matchingTag.id, + }) + await taggingFactory.create({ + taggableId: otherDatasetWithMatchingTag.id, + taggableType: TaggableTypes.DATASET, + tagId: otherMatchingTag.id, + }) + + const searchToken = "aaa" + + // Act + const searchQuery = datasetsSearch() + const result = await Dataset.findAll({ + where: { id: { [Op.in]: searchQuery } }, + replacements: { + searchTokenWildcard: `%${searchToken}%`, + searchToken, + }, + }) + + // Assert + expect(result).toEqual([ + expect.objectContaining({ + id: datasetWithMatchingTag.id, + }), + expect.objectContaining({ + id: otherDatasetWithMatchingTag.id, + }), + ]) + }) + + test("when dataset stewardship department name matches, returns the dataset", async () => { + // Arrange + const user = await userFactory.create() + const dataset1 = await datasetFactory.create({ + creatorId: user.id, + ownerId: 
user.id, + }) + const dataset2 = await datasetFactory.create({ + creatorId: user.id, + ownerId: user.id, + }) + await datasetFactory.create({ + creatorId: user.id, + ownerId: user.id, + }) + const department1 = await userGroupFactory.create({ + type: UserGroupTypes.DEPARTMENT, + name: "aaa-bbb-ccc", + }) + const department2 = await userGroupFactory.create({ + type: UserGroupTypes.DEPARTMENT, + name: "bbb-ccc-ddd", + }) + await datasetStewardshipFactory.create({ + datasetId: dataset1.id, + ownerId: user.id, + supportId: user.id, + departmentId: department1.id, + }) + await datasetStewardshipFactory.create({ + datasetId: dataset2.id, + ownerId: user.id, + supportId: user.id, + departmentId: department2.id, + }) + + const searchToken = "bbb" + + // Act + const searchQuery = datasetsSearch() + const result = await Dataset.findAll({ + where: { id: { [Op.in]: searchQuery } }, + replacements: { + searchTokenWildcard: `%${searchToken}%`, + searchToken, + }, + }) + + // Assert + expect(result).toEqual([ + expect.objectContaining({ + id: dataset1.id, + }), + expect.objectContaining({ + id: dataset2.id, + }), + ]) + }) + + test("when dataset stewardship division name matches, returns the dataset", async () => { + // Arrange + const user = await userFactory.create() + const dataset1 = await datasetFactory.create({ + creatorId: user.id, + ownerId: user.id, + }) + const dataset2 = await datasetFactory.create({ + creatorId: user.id, + ownerId: user.id, + }) + await datasetFactory.create({ + creatorId: user.id, + ownerId: user.id, + }) + const department1 = await userGroupFactory.create({ + type: UserGroupTypes.DEPARTMENT, + name: "zzzzzzzzz", + }) + const department2 = await userGroupFactory.create({ + type: UserGroupTypes.DEPARTMENT, + name: "zzzzzzzzzzz", + }) + const division1 = await userGroupFactory.create({ + type: UserGroupTypes.DIVISION, + name: "aaa-bbb-ccc", + }) + const division2 = await userGroupFactory.create({ + type: UserGroupTypes.DIVISION, + name: "bbb-ccc-ddd", 
+ }) + await datasetStewardshipFactory.create({ + datasetId: dataset1.id, + ownerId: user.id, + supportId: user.id, + departmentId: department1.id, + divisionId: division1.id, + }) + await datasetStewardshipFactory.create({ + datasetId: dataset2.id, + ownerId: user.id, + supportId: user.id, + departmentId: department2.id, + divisionId: division2.id, + }) + + const searchToken = "bbb" + + // Act + const searchQuery = datasetsSearch() + const result = await Dataset.findAll({ + where: { id: { [Op.in]: searchQuery } }, + replacements: { + searchTokenWildcard: `%${searchToken}%`, + searchToken, + }, + }) + + // Assert + expect(result).toEqual([ + expect.objectContaining({ + id: dataset1.id, + }), + expect.objectContaining({ + id: dataset2.id, + }), + ]) + }) + + test("when dataset stewardship branch name matches, returns the dataset", async () => { + // Arrange + const user = await userFactory.create() + const dataset1 = await datasetFactory.create({ + creatorId: user.id, + ownerId: user.id, + }) + const dataset2 = await datasetFactory.create({ + creatorId: user.id, + ownerId: user.id, + }) + await datasetFactory.create({ + creatorId: user.id, + ownerId: user.id, + }) + const department1 = await userGroupFactory.create({ + type: UserGroupTypes.DEPARTMENT, + name: "zzzzzzzzz", + }) + const department2 = await userGroupFactory.create({ + type: UserGroupTypes.DEPARTMENT, + name: "zzzzzzzzzzz", + }) + const division1 = await userGroupFactory.create({ + type: UserGroupTypes.DIVISION, + name: "zzzzzzzzzzz", + }) + const division2 = await userGroupFactory.create({ + type: UserGroupTypes.DIVISION, + name: "zzzzzzzzzz", + }) + const branch1 = await userGroupFactory.create({ + type: UserGroupTypes.BRANCH, + name: "aaa-bbb-ccc", + }) + const branch2 = await userGroupFactory.create({ + type: UserGroupTypes.BRANCH, + name: "bbb-ccc-ddd", + }) + await datasetStewardshipFactory.create({ + datasetId: dataset1.id, + ownerId: user.id, + supportId: user.id, + departmentId: department1.id, 
+ divisionId: division1.id, + branchId: branch1.id, + }) + await datasetStewardshipFactory.create({ + datasetId: dataset2.id, + ownerId: user.id, + supportId: user.id, + departmentId: department2.id, + divisionId: division2.id, + branchId: branch2.id, + }) + + const searchToken = "bbb" + + // Act + const searchQuery = datasetsSearch() + const result = await Dataset.findAll({ + where: { id: { [Op.in]: searchQuery } }, + replacements: { + searchTokenWildcard: `%${searchToken}%`, + searchToken, + }, + }) + + // Assert + expect(result).toEqual([ + expect.objectContaining({ + id: dataset1.id, + }), + expect.objectContaining({ + id: dataset2.id, + }), + ]) + }) + + test("when dataset stewardship unit name matches, returns the dataset", async () => { + // Arrange + const user = await userFactory.create() + const dataset1 = await datasetFactory.create({ + creatorId: user.id, + ownerId: user.id, + }) + const dataset2 = await datasetFactory.create({ + creatorId: user.id, + ownerId: user.id, + }) + await datasetFactory.create({ + creatorId: user.id, + ownerId: user.id, + }) + const department1 = await userGroupFactory.create({ + type: UserGroupTypes.DEPARTMENT, + name: "zzzzzzzzz", + }) + const department2 = await userGroupFactory.create({ + type: UserGroupTypes.DEPARTMENT, + name: "zzzzzzzzzzz", + }) + const division1 = await userGroupFactory.create({ + type: UserGroupTypes.DIVISION, + name: "zzzzzzzzzzz", + }) + const division2 = await userGroupFactory.create({ + type: UserGroupTypes.DIVISION, + name: "zzzzzzzzzz", + }) + const branch1 = await userGroupFactory.create({ + type: UserGroupTypes.BRANCH, + name: "zzzzzzzzzz", + }) + const branch2 = await userGroupFactory.create({ + type: UserGroupTypes.BRANCH, + name: "zzzzzzzzzz", + }) + const unit1 = await userGroupFactory.create({ + type: UserGroupTypes.UNIT, + name: "aaa-bbb-ccc", + }) + const unit2 = await userGroupFactory.create({ + type: UserGroupTypes.UNIT, + name: "bbb-ccc-ddd", + }) + await 
datasetStewardshipFactory.create({ + datasetId: dataset1.id, + ownerId: user.id, + supportId: user.id, + departmentId: department1.id, + divisionId: division1.id, + branchId: branch1.id, + unitId: unit1.id, + }) + await datasetStewardshipFactory.create({ + datasetId: dataset2.id, + ownerId: user.id, + supportId: user.id, + departmentId: department2.id, + divisionId: division2.id, + branchId: branch2.id, + unitId: unit2.id, + }) + + const searchToken = "bbb" + + // Act + const searchQuery = datasetsSearch() + const result = await Dataset.findAll({ + where: { id: { [Op.in]: searchQuery } }, + replacements: { + searchTokenWildcard: `%${searchToken}%`, + searchToken, + }, + }) + + // Assert + expect(result).toEqual([ + expect.objectContaining({ + id: dataset1.id, + }), + expect.objectContaining({ + id: dataset2.id, + }), + ]) + }) + + test("when dataset stewardship department acronym matches exactly, returns the dataset", async () => { + // Arrange + const user = await userFactory.create() + const dataset1 = await datasetFactory.create({ + creatorId: user.id, + ownerId: user.id, + }) + const dataset2 = await datasetFactory.create({ + creatorId: user.id, + ownerId: user.id, + }) + // await datasetFactory.create({ + // creatorId: user.id, + // ownerId: user.id, + // }) + const department1 = await userGroupFactory.create({ + type: UserGroupTypes.DEPARTMENT, + name: "zzzzzzzzzzz", + acronym: "AAA", + }) + const department2 = await userGroupFactory.create({ + type: UserGroupTypes.DEPARTMENT, + name: "zzzzzzzzzzzz", + acronym: "AAABBB", + }) + await datasetStewardshipFactory.create({ + datasetId: dataset1.id, + ownerId: user.id, + supportId: user.id, + departmentId: department1.id, + }) + await datasetStewardshipFactory.create({ + datasetId: dataset2.id, + ownerId: user.id, + supportId: user.id, + departmentId: department2.id, + }) + + const searchToken = "AAA" + + // Act + const searchQuery = datasetsSearch() + const result = await Dataset.findAll({ + where: { id: { 
[Op.in]: searchQuery } }, + replacements: { + searchTokenWildcard: `%${searchToken}%`, + searchToken, + }, + }) + + // Assert + expect(result).toEqual([ + expect.objectContaining({ + id: dataset1.id, + }), + ]) + }) + }) +}) diff --git a/api/tests/services/user-groups/sync-service.test.ts b/api/tests/services/user-groups/sync-service.test.ts new file mode 100644 index 00000000..57ea3bdc --- /dev/null +++ b/api/tests/services/user-groups/sync-service.test.ts @@ -0,0 +1,84 @@ +import { SyncService } from "@/services/user-groups" + +import { yukonGovernmentIntegration } from "@/integrations" + +vi.mock("@/integrations", () => ({ + yukonGovernmentIntegration: { + fetchDivisions: vi.fn(), + }, +})) +const mockedYukonGovernmentIntegration = vi.mocked(yukonGovernmentIntegration) + +describe("api/src/services/user-groups/sync-service.ts", () => { + describe("SyncService", () => { + describe("#perform", () => { + test("when fetched divisions includes a department record, creates the correct department", async () => { + // Arrange + const fetchDivisionsResult = { + divisions: [ + { department: "Department 1", division: null, branch: null, unit: null, order: 1 }, + ], + count: 1, + } + mockedYukonGovernmentIntegration.fetchDivisions.mockResolvedValue(fetchDivisionsResult) + + // Act + const result = await SyncService.perform() + + // Assert + expect(result).toEqual([ + expect.objectContaining({ + name: "Department 1", + type: "department", + order: 1, + }), + ]) + }) + + test("when building models, builds the correct acronym", async () => { + // Arrange + const fetchDivisionsResult = { + divisions: [ + { + department: "Highways and Public Works", + division: null, + branch: null, + unit: null, + order: 1, + }, + ], + count: 1, + } + mockedYukonGovernmentIntegration.fetchDivisions.mockResolvedValue(fetchDivisionsResult) + + // Act + const result = await SyncService.perform() + + // Assert + expect(result).toEqual([ + expect.objectContaining({ + name: "Highways and Public 
Works", + type: "department", + acronym: "HPW", + order: 1, + }), + ]) + }) + + test("when cleaned name is empty, skips creating the user group", async () => { + // Arrange + const fetchDivisionsResult = { + divisions: [{ department: " ", division: null, branch: null, unit: null, order: 1 }], + count: 1, + } + mockedYukonGovernmentIntegration.fetchDivisions.mockResolvedValue(fetchDivisionsResult) + + // Act + const result = await SyncService.perform() + + // Assert + expect(result).toEqual([]) + }) + }) + }) +}) diff --git a/web/tests/utils/acronymize.test.ts b/api/tests/utils/acronymize.test.ts similarity index 100% rename from web/tests/utils/acronymize.test.ts rename to api/tests/utils/acronymize.test.ts diff --git a/bin/dev b/bin/dev index a12aed24..10889e57 100755 --- a/bin/dev +++ b/bin/dev @@ -98,12 +98,14 @@ class DevHelper end def test_api(*args, **kwargs) + reformat_project_relative_path_filter_for_vitest!(args, 'api/') run(*%w[test_api npm run test], *args, **kwargs) end alias test test_api def test_web(*args, **kwargs) + reformat_project_relative_path_filter_for_vitest!(args, 'web/') run(*%w[test_web npm run test], *args, **kwargs) end @@ -232,6 +234,24 @@ class DevHelper `id -g`.strip end + + def reformat_project_relative_path_filter_for_vitest!(args, prefix) + if args.length.positive? && args[0].start_with?(prefix) + src_path_prefix = "#{prefix}src/" + test_path_regex = Regexp.escape(prefix) + src_path_regex = Regexp.escape(src_path_prefix) + + if args[0].start_with?(src_path_prefix) + args[0] = args[0].gsub(%r{^#{src_path_regex}}, 'tests/') + .gsub(%r{\.ts$}, '.test.ts') + # TODO: handle other file types + else + args[0] = args[0].gsub(%r{^#{test_path_regex}}, '') + end + + puts "Reformatted path filter from project relative to service relative for vitest." 
+ end + end end # Only execute main function when file is executed diff --git a/web/src/api/datasets-api.ts b/web/src/api/datasets-api.ts index f99ed33a..b188e69a 100644 --- a/web/src/api/datasets-api.ts +++ b/web/src/api/datasets-api.ts @@ -60,6 +60,7 @@ export type DatasetDetailedResult = Dataset & { // Keep in sync with api/src/models/dataset.ts -> scopes export type DatasetsFilters = { + search?: string withOwnerDepartment?: UserGroup["id"] withTagNames?: string[] } diff --git a/web/src/api/user-groups-api.ts b/web/src/api/user-groups-api.ts index 8a021bfa..858e17b4 100644 --- a/web/src/api/user-groups-api.ts +++ b/web/src/api/user-groups-api.ts @@ -11,6 +11,7 @@ export type UserGroup = { id: number parentId: number name: string + acronym: string type: UserGroupTypes order: number createdAt: string diff --git a/web/src/components/datasets/DatasetsTable.vue b/web/src/components/datasets/DatasetsTable.vue index ef749be6..381c4991 100644 --- a/web/src/components/datasets/DatasetsTable.vue +++ b/web/src/components/datasets/DatasetsTable.vue @@ -10,32 +10,23 @@ @update:page="updatePage" >