diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 4e8597e3..977c9a83 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -12,6 +12,6 @@ ## Suggestions on how to test this: -## Is there a release notes update needed for this change?: +## Is there a release notes or changelog update needed for this change?: ## Additional documentation: diff --git a/.gitignore b/.gitignore index dbc1d777..e8782206 100644 --- a/.gitignore +++ b/.gitignore @@ -10,6 +10,9 @@ node_modules # unit tests coverage +# macOS +.DS_Store + # ignore npm lock package-json.lock .npmrc \ No newline at end of file diff --git a/.husky/pre-commit b/.husky/pre-commit new file mode 100644 index 00000000..ed1489a7 --- /dev/null +++ b/.husky/pre-commit @@ -0,0 +1,4 @@ +npm run format +npm run typecheck +npm run lint:fix +git add . diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 00000000..2ea3e0f7 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,63 @@ +# Changelog + +All notable changes to **Dataverse Client Javascript** are documented here. + +This changelog follows the principles of [Keep a Changelog](https://keepachangelog.com/en/1.1.0/) and adheres to [Semantic Versioning](https://semver.org/). This document is intended for developers, contributors, and users who need to understand the technical details. + +## [Unreleased] + +### Added + +### Changed + +### Fixed + +### Removed + +[Unreleased]: https://github.com/IQSS/dataverse-client-javascript/compare/v2.1.0...develop + +--- + +## [v2.1.0] -- 2025-09-29 + +### Added + +- CHANGELOG.md file to track changes in a standard way. + +- New property isAdvancedSearchFieldType returned by API in GetCollectionMetadataBlocks and GetMetadataBlockByName use cases. + +- Use cases for Notifications: GetAllNotifications, DeleteNotification. + +- Use cases for Dataset Linking: LinkDataset, UnlinkDataset, GetDatasetLinkedCollections. + +- Use case: GetCitationInOtherFormats. 
+ +- Use case: GetDatasetAvailableCategories. + +- Use cases for Collections Linking: LinkCollection, UnlinkCollection, GetCollectionLinks. + +- Use cases for External Tools: GetExternalTools, GetDatasetExternalToolResolved, GetFileExternalToolResolved. + +- Use case: GetDatasetTemplates. + +- Use case: GetAvailableStandardLicenses. + +- Use case: GetAvailableDatasetMetadataExportFormats. + +- Use cases for Dataset Types: GetDatasetAvailableDatasetTypes, GetDatasetAvailableDatasetType, AddDatasetType, LinkDatasetTypeWithMetadataBlocks, SetAvailableLicensesForDatasetType, DeleteDatasetType. + +### Changed + +- CreateDataset use case updated to allow non-default dataset types. + +- GetCollectionMetadataBlocks use case updated to support passing a dataset type. + +### Fixed + +- Integration tests in Roles Repository. + +- Incorrect Filter Queries split that caused value parts to be truncated. + +### Security + +- Dependencies updated to address vulnerabilities found by npm audit. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 44a7f046..7e53e952 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -16,6 +16,29 @@ First of all thank you very much for your interest in contributing to this proje - Unit and integration tests pass - Unit and integration tests for new functionality/fix are added - Documentation is updated (Any new use case added or modified should be documented in the [Use Cases](./docs/useCases.md) section) +- Changelog is updated with your changes in the `[Unreleased]` section of [CHANGELOG.md](./CHANGELOG.md) + +## Maintaining the Changelog + +When contributing to this project, it's important to document your changes in the changelog to help users and developers understand what has been added, changed, fixed, or removed between versions. The changelog helps maintain transparency about project evolution and assists users in understanding the impact of updates. 
We also have another changelog for the design system, so for any design-system changes, please include them in that changelog. + +### When to Add Changelog Entries + +**Every pull request should include a changelog entry** + +Add a changelog entry for changes, including: + +- **Added**: New features, components, or functionality +- **Changed**: Changes to existing functionality, API modifications, or package updates +- **Fixed**: Bug fixes and issue resolutions +- **Removed**: Deprecated features or removed functionality + +### How to Add Changelog Entries + +1. **Add your changes to the `[Unreleased]` section** at the top of `CHANGELOG.md` +2. **Categorize your changes** under the appropriate category (Added, Changed, Fixed, Removed) +3. **Write clear, concise descriptions** that help users understand the impact of changes +4. **Include relevant issue numbers** when applicable ## Code of Conduct diff --git a/README.md b/README.md index 4421abdf..44b2266d 100644 --- a/README.md +++ b/README.md @@ -40,6 +40,10 @@ For detailed information about available use cases see [Use Cases Docs](https:// For detailed information about usage see [Usage Docs](https://github.com/IQSS/dataverse-client-javascript/blob/main/docs/usage.md). +## Changelog + +See [CHANGELOG.md](https://github.com/IQSS/dataverse-client-javascript/blob/main/CHANGELOG.md) for a detailed history of changes to this project. + ## Contributing Want to add a new use case or improve an existing one? Please check the [Contributing](https://github.com/IQSS/dataverse-client-javascript/blob/main/CONTRIBUTING.md) section. 
diff --git a/docs/making-releases.md b/docs/making-releases.md index 13ed7d96..0890f1b2 100644 --- a/docs/making-releases.md +++ b/docs/making-releases.md @@ -4,6 +4,7 @@ - [Regular or Hotfix?](#regular-or-hotfix) - [Create Github Issue and Release Branch](#create-github-issue-and-release-branch) - [Update the version](#update-the-version) +- [Update the changelog](#update-the-changelog) - [Merge "release branch" into "main"](#merge-release-branch-into-main) - [Publish the Dataverse Client Javascript package](#publish-the-dataverse-client-javascript-package) - [Create a Draft Release on GitHub and Tag the Version](#create-a-draft-release-on-github-and-tag-the-version) @@ -40,6 +41,51 @@ npm version 3.5.0 --no-git-tag-version This command will update the version in the `package.json` and `package-lock.json`. +## Update the changelog + +**Note**: Contributors should have already added their changes to the `[Unreleased]` section as part of their pull requests (see [CONTRIBUTING.md](../CONTRIBUTING.md#maintaining-the-changelog) for details). + +Before releasing, ensure the changelog is properly prepared: + +1. **Review the [Unreleased] section** in `CHANGELOG.md` +2. **Move entries from [Unreleased] to the new version section**: + + ```markdown + ## [vX.X.X] -- YYYY-MM-DD + + ### Added + + - Feature descriptions from unreleased section + + ### Changed + + - Changes from unreleased section + + ### Fixed + + - Bug fixes from unreleased section + + ### Removed + + - Removals from unreleased section + ``` + +3. **Clear the [Unreleased] section** but keep the structure: + + ```markdown + ## [Unreleased] + + ### Added + + ### Changed + + ### Fixed + + ### Removed + ``` + +4. **Commit the changelog updates** as part of the release preparation + If everything looks good, you can push the changes to the repository. 
## Merge "release branch" into "main" @@ -96,7 +142,7 @@ Go to https://github.com/IQSS/dataverse-client-javascript/releases/new to start - Under "Release title" use the same name as the tag such as v3.5.0. -- Add a description of the changes included in this release. You should include a link to the recently published npm version and summarize the key updates, fixes, or features. +- Add a description of the changes included in this release. You should include a link to the recently published npm version and summarize the key updates, fixes, or features. You can copy the content from the corresponding version section in `CHANGELOG.md` for consistency. - Click "Save draft" because we do not want to publish the release yet. diff --git a/docs/useCases.md b/docs/useCases.md index 2ccee9fc..60704c23 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -36,12 +36,23 @@ The different use cases currently available in the package are classified below, - [Get Differences between Two Dataset Versions](#get-differences-between-two-dataset-versions) - [List All Datasets](#list-all-datasets) - [Get Dataset Versions Summaries](#get-dataset-versions-summaries) + - [Get Dataset Linked Collections](#get-dataset-linked-collections) + - [Get Dataset Available Categories](#get-dataset-available-categories) + - [Get Dataset Templates](#get-dataset-templates) + - [Get Dataset Available Dataset Types](#get-dataset-available-dataset-types) + - [Get Dataset Available Dataset Type](#get-dataset-available-dataset-type) - [Datasets write use cases](#datasets-write-use-cases) - [Create a Dataset](#create-a-dataset) - [Update a Dataset](#update-a-dataset) - [Publish a Dataset](#publish-a-dataset) - [Deaccession a Dataset](#deaccession-a-dataset) - [Delete a Draft Dataset](#delete-a-draft-dataset) + - [Link a Dataset](#link-a-dataset) + - [Unlink a Dataset](#unlink-a-dataset) + - [Add a Dataset Type](#add-a-dataset-type) + - [Link Dataset Type with Metadata 
Blocks](#link-dataset-type-with-metadata-blocks) + - [Set Available Licenses For Dataset Type](#set-available-licenses-for-dataset-type) + - [Delete a Dataset Type](#delete-a-dataset-type) - [Files](#Files) - [Files read use cases](#files-read-use-cases) - [Get a File](#get-a-file) @@ -61,6 +72,8 @@ The different use cases currently available in the package are classified below, - [Replace a File](#replace-a-file) - [Restrict or Unrestrict a File](#restrict-or-unrestrict-a-file) - [Update File Metadata](#update-file-metadata) + - [Update File Categories](#update-file-categories) + - [Update File Tabular Tags](#update-file-tabular-tags) - [Metadata Blocks](#metadata-blocks) - [Metadata Blocks read use cases](#metadata-blocks-read-use-cases) - [Get All Facetable Metadata Fields](#get-all-facetable-metadata-fields) @@ -83,8 +96,23 @@ The different use cases currently available in the package are classified below, - [Get Maximum Embargo Duration In Months](#get-maximum-embargo-duration-in-months) - [Get ZIP Download Limit](#get-zip-download-limit) - [Get Application Terms of Use](#get-application-terms-of-use) + - [Get Available Dataset Metadata Export Formats](#get-available-dataset-metadata-export-formats) +- [Licenses](#Licenses) + - [Get Available Standard License Terms](#get-available-standard-license-terms) - [Contact](#Contact) - [Send Feedback to Object Contacts](#send-feedback-to-object-contacts) +- [Notifications](#Notifications) + - [Get All Notifications by User](#get-all-notifications-by-user) + - [Delete Notification](#delete-notification) + - [Get Unread Count](#get-unread-count) + - [Mark As Read](#mark-as-read) +- [Search](#Search) + - [Get Search Services](#get-search-services) +- [External Tools](#external-tools) + - [External Tools read use cases](#external-tools-read-use-cases) + - [Get External Tools](#get-external-tools) + - [Get Dataset External Tool Resolved](#get-dataset-external-tool-resolved) + - [Get File External Tool 
Resolved](#get-file-external-tool-resolved) ## Collections @@ -222,6 +250,8 @@ This use case supports the following optional parameters depending on the search - **limit**: (number) Limit for pagination. - **offset**: (number) Offset for pagination. - **collectionSearchCriteria**: ([CollectionSearchCriteria](../src/collections/domain/models/CollectionSearchCriteria.ts)) Supports filtering the collection items by different properties. +- **searchServiceName**: The search service name on which to execute the search (Optional). +- **showTypeCounts**: If true, the response will include the count per object type (Optional). #### List My Data Collection Items @@ -557,6 +587,37 @@ The `datasetId` parameter can be a string, for persistent identifiers, or a numb There is an optional third parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. +#### Get Dataset Citation In Other Formats + +Retrieves the citation for a dataset in a specified bibliographic format. + +##### Example call: + +```typescript +import { getDatasetCitationInOtherFormats } from '@iqss/dataverse-client-javascript' + +/* ... */ + +const datasetId = 2 +const datasetVersionId = '1.0' + +getDatasetCitationInOtherFormats + .execute(datasetId, datasetVersionId, format) + .then((citationText: FormattedCitation) => { + /* ... */ + }) + +/* ... */ +``` + +_See [use case](../src/datasets/domain/useCases/GetDatasetCitationInOtherFormats.ts) implementation_. + +Supported formats include 'EndNote' (XML), 'RIS' (plain text), 'BibTeX' (plain text), 'CSLJson' (JSON), and 'Internal' (HTML). The response contains the raw citation content in the requested format, the format type, and the content type (MIME type). + +The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. 
+ +There is an optional third parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. + #### Get Dataset Citation Text By Private URL Token Returns the Dataset citation text, given an associated Private URL Token. @@ -735,11 +796,71 @@ _See [use case](../src/datasets/domain/useCases/GetDatasetVersionsSummaries.ts) The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. +#### Get Dataset Linked Collections + +Returns an array of [DatasetLinkedCollection](../src/datasets/domain/models/DatasetLinkedCollection.ts) that contains the collections linked to a dataset. + +##### Example call: + +```typescript +import { getDatasetLinkedCollections } from '@iqss/dataverse-client-javascript' + +/* ... */ + +const datasetId = 'doi:10.77777/FK2/AAAAAA' + +getDatasetLinkedCollections + .execute(datasetId) + .then((datasetLinkedCollections: DatasetLinkedCollection[]) => { + /* ... */ + }) + +/* ... */ +``` + +_See [use case](../src/datasets/domain/useCases/GetDatasetLinkedCollections.ts) implementation_. + +#### Get Dataset Available Dataset Types + +Returns a list of available dataset types that can be used at dataset creation. By default, only the type "dataset" is returned. + +###### Example call: + +```typescript +import { getDatasetAvailableDatasetTypes } from '@iqss/dataverse-client-javascript' + +/* ... */ + +getDatasetAvailableDatasetTypes.execute().then((datasetTypes: DatasetType[]) => { + /* ... */ +}) +``` + +_See [use case](../src/datasets/domain/useCases/GetDatasetAvailableDatasetTypes.ts) implementation_. + +#### Get Dataset Available Dataset Type + +Returns an available dataset types that can be used at dataset creation. + +###### Example call: + +```typescript +import { getDatasetAvailableDatasetType } from '@iqss/dataverse-client-javascript' + +/* ... 
*/ + +getDatasetAvailableDatasetType.execute().then((datasetType: DatasetType) => { + /* ... */ +}) +``` + +_See [use case](../src/datasets/domain/useCases/GetDatasetAvailableDatasetType.ts) implementation_. + ### Datasets Write Use Cases #### Create a Dataset -Creates a new Dataset in a collection, given a [DatasetDTO](../src/datasets/domain/dtos/DatasetDTO.ts) object and an optional collection identifier, which defaults to `:root`. +Creates a new Dataset in a collection, given a [DatasetDTO](../src/datasets/domain/dtos/DatasetDTO.ts) object, an optional collection identifier, which defaults to `:root`, and an optional dataset type. This use case validates the submitted fields of each metadata block and can return errors of type [ResourceValidationError](../src/core/domain/useCases/validators/errors/ResourceValidationError.ts), which include sufficient information to determine which field value is invalid and why. @@ -794,7 +915,7 @@ createDataset.execute(datasetDTO).then((newDatasetIds: CreatedDatasetIdentifiers _See [use case](../src/datasets/domain/useCases/CreateDataset.ts) implementation_. -The above example creates the new dataset in the root collection since no collection identifier is specified. If you want to create the dataset in a different collection, you must add the collection identifier as a second parameter in the use case call. +The above example creates the new dataset in the root collection since no collection identifier is specified. If you want to create the dataset in a different collection, you must add the collection identifier as a second parameter in the use case call. If you want the dataset type to be anything other than dataset, first [check available dataset types](#get-dataset-available-dataset-types) and then add the name of the dataset type as the third parameter. 
The use case returns a [CreatedDatasetIdentifiers](../src/datasets/domain/models/CreatedDatasetIdentifiers.ts) object, which includes the persistent and numeric identifiers of the created dataset. @@ -944,6 +1065,48 @@ The `datasetId` parameter is a number for numeric identifiers or string for pers If you try to delete a dataset without draft version, you will get a not found error. +#### Link a Dataset + +Creates a link between a Dataset and a Collection. + +##### Example call: + +```typescript +import { linkDataset } from '@iqss/dataverse-client-javascript' + +/* ... */ + +const datasetId = 1 +const collectionAlias = 'collection-alias' + +linkDataset.execute(datasetId, collectionAlias) + +/* ... */ +``` + +_See [use case](../src/datasets/domain/useCases/LinkDataset.ts) implementation_. + +#### Unlink a Dataset + +Removes a link between a Dataset and a Collection. + +##### Example call: + +```typescript +import { unlinkDataset } from '@iqss/dataverse-client-javascript' + +/* ... */ + +const datasetId = 1 +const collectionAlias = 'collection-alias' + +unlinkDataset.execute(datasetId, collectionAlias) + +/* ... */ +``` + +_See [use case](../src/datasets/domain/useCases/UnlinkDataset.ts) implementation_. + #### Get Download Count of a Dataset Total number of downloads requested for a dataset, given a dataset numeric identifier, @@ -976,6 +1139,118 @@ The `includeMDC` parameter is optional. - If MDC isn't enabled, the download count will return a total count, without `MDCStartDate`. - If MDC is enabled but the `includeMDC` is false, the count will be limited to the time before `MDCStartDate` +#### Get Dataset Available Categories + +Returns a list of available file categories that may be applied to the files of a given dataset. + +###### Example call: + +```typescript +import { getDatasetAvailableCategories } from '@iqss/dataverse-client-javascript' + +/* ... 
*/ + +const datasetId = 1 + +getDatasetAvailableCategories.execute(datasetId).then((categories: string[]) => { + /* ... */ +}) +``` + +_See [use case](../src/datasets/domain/useCases/GetDatasetAvailableCategories.ts) implementation_. + +The `datasetId` parameter is a number for numeric identifiers or string for persistent identifiers. + +#### Get Dataset Templates + +Returns a [DatasetTemplate](../src/datasets/domain/models/DatasetTemplate.ts) array containing the dataset templates of the requested collection, given the collection identifier or alias. + +##### Example call: + +```typescript +import { getDatasetTemplates } from '@iqss/dataverse-client-javascript' + +const collectionIdOrAlias = 12345 + +getDatasetTemplates.execute(collectionIdOrAlias).then((datasetTemplates: DatasetTemplate[]) => { + /* ... */ +}) +``` + +_See [use case](../src/datasets/domain/useCases/GetDatasetTemplates.ts) implementation_. + +#### Add a Dataset Type + +Adds a dataset type that can be used at dataset creation. + +###### Example call: + +```typescript +import { addDatasetType } from '@iqss/dataverse-client-javascript' + +/* ... */ + +addDatasetType.execute(datasetType).then((datasetType: DatasetType) => { + /* ... */ +}) +``` + +_See [use case](../src/datasets/domain/useCases/AddDatasetType.ts) implementation_. + +#### Link Dataset Type with Metadata Blocks + +Link a dataset type with metadata blocks. + +###### Example call: + +```typescript +import { linkDatasetTypeWithMetadataBlocks } from '@iqss/dataverse-client-javascript' + +/* ... */ + +linkDatasetTypeWithMetadataBlocks.execute(datasetTypeId, ['geospatial']).then(() => { + /* ... */ +}) +``` + +_See [use case](../src/datasets/domain/useCases/LinkDatasetTypeWithMetadataBlocks.ts) implementation_. + +#### Set Available Licenses For Dataset Type + +Set available licenses for dataset type. + +###### Example call: + +```typescript +import { setAvailableLicensesForDatasetType } from '@iqss/dataverse-client-javascript' + +/* ... 
*/ + +setAvailableLicensesForDatasetType.execute(datasetTypeId, ['CC BY 4.0']).then(() => { + /* ... */ +}) +``` + +_See [use case](../src/datasets/domain/useCases/SetAvailableLicensesForDatasetType.ts) implementation_. + +#### Delete a Dataset Type + +Delete a dataset type. + +###### Example call: + +```typescript +import { deleteDatasetType } from '@iqss/dataverse-client-javascript' + +/* ... */ + +deleteDatasetType.execute(datasetTypeId).then(() => { + /* ... */ +}) +``` + +_See [use case](../src/datasets/domain/useCases/DeleteDatasetType.ts) implementation_. + ## Files ### Files read use cases @@ -1737,6 +2012,8 @@ The `collectionIdOrAlias` is a generic collection identifier, which can be eithe There is a second optional parameter called `onlyDisplayedOnCreate` which indicates whether or not to return only the metadata blocks that are displayed on dataset creation. The default value is false. +There is a third optional parameter called `datasetType` which will include additional fields from metadata blocks linked to the provided type, if any. Before using this parameter, you will probably want to [list available dataset types](#get-dataset-available-dataset-types) for your installation. + ## Users ### Users read use cases @@ -1955,6 +2232,51 @@ getApplicationTermsOfUse.execute().then((termsOfUse: string) => { _See [use case](../src/info/domain/useCases/GetApplicationTermsOfUse.ts) implementation_. +#### Get Available Dataset Metadata Export Formats + +Returns a [DatasetMetadataExportFormats](../src/info/domain/models/DatasetMetadataExportFormats.ts) object containing the available dataset metadata export formats. + +##### Example call: + +```typescript +import { + getAvailableDatasetMetadataExportFormats, + DatasetMetadataExportFormats +} from '@iqss/dataverse-client-javascript' + +/* ... */ + +getAvailableDatasetMetadataExportFormats + .execute() + .then((datasetMetadataExportFormats: DatasetMetadataExportFormats) => { + /* ... */ + }) + +/* ... 
*/ +``` + +_See [use case](../src/info/domain/useCases/GetAvailableDatasetMetadataExportFormats.ts) implementation_. + +## Licenses + +### Get Available Standard License Terms + +Returns a list of available standard licenses that can be selected for a dataset. + +##### Example call: + +```typescript +import { getAvailableStandardLicenses, License } from '@iqss/dataverse-client-javascript' + +/* ... */ + +getAvailableStandardLicenses.execute().then((licenses: License[]) => { + /* ... */ +}) +``` + +_See [use case](../src/licenses/domain/useCases/GetAvailableStandardLicenses.ts) implementation_. + ## Contact #### Send Feedback to Object Contacts @@ -1991,3 +2313,189 @@ In ContactDTO, it takes the following information: - **subject**: the email subject line. - **body**: the email body to send. - **fromEmail**: the email to list in the reply-to field. + +## Notifications + +#### Get All Notifications by User + +Returns a [Notification](../src/notifications/domain/models/Notification.ts) array containing all notifications for the current authenticated user. + +##### Example call: + +```typescript +import { getAllNotificationsByUser } from '@iqss/dataverse-client-javascript' + +/* ... */ + +getAllNotificationsByUser.execute().then((notifications: Notification[]) => { + /* ... */ +}) + +/* ... */ +``` + +_See [use case](../src/notifications/domain/useCases/GetAllNotificationsByUser.ts) implementation_. + +#### Delete Notification + +Deletes a specific notification for the current authenticated user by its ID. + +##### Example call: + +```typescript +import { deleteNotification } from '@iqss/dataverse-client-javascript' + +/* ... */ + +const notificationId = 123 + +deleteNotification.execute(notificationId: number).then(() => { + /* ... */ +}) + +/* ... */ +``` + +_See [use case](../src/notifications/domain/useCases/DeleteNotification.ts) implementation_. + +#### Get Unread Count + +Returns the number of unread notifications for the current authenticated user. 
+ +##### Example call: + +```typescript +import { getUnreadNotificationsCount } from '@iqss/dataverse-client-javascript' + +/* ... */ + +getUnreadNotificationsCount.execute().then((count: number) => { + console.log(`You have ${count} unread notifications`) +}) + +/* ... */ +``` + +_See [use case](../src/notifications/domain/useCases/GetUnreadNotificationsCount.ts) implementation_. + +#### Mark As Read + +Marks a specific notification as read for the current authenticated user. This operation is idempotent - marking an already-read notification as read will not cause an error. + +##### Example call: + +```typescript +import { markNotificationAsRead } from '@iqss/dataverse-client-javascript' + +/* ... */ + +const notificationId = 123 + +markNotificationAsRead.execute(notificationId).then(() => { + console.log('Notification marked as read') +}) + +/* ... */ +``` + +_See [use case](../src/notifications/domain/useCases/MarkNotificationAsRead.ts) implementation_. + +## Search + +#### Get Search Services + +Returns all [Search Services](../src/search/domain/models/SearchService.ts) available in the installation. + +##### Example call: + +```typescript +import { getSearchServices } from '@iqss/dataverse-client-javascript' + +/* ... */ + +getSearchServices.execute().then((searchServices: SearchService[]) => { + /* ... */ +}) + +/* ... */ +``` + +_See [use case](../src/search/domain/useCases/GetSearchServices.ts) implementation_. + +## External Tools + +### External Tools Read Use Cases + +#### Get External Tools + +Returns an array of [ExternalTool](../src/externalTools/domain/models/ExternalTool.ts) objects, which represent the external tools available in the installation. + +##### Example call: + +```typescript +import { getExternalTools } from '@iqss/dataverse-client-javascript' + +/* ... */ + +getExternalTools.execute().then((externalTools: ExternalTool[]) => { + /* ... */ +}) + +/* ... 
*/ +``` + +_See [use case](../src/externalTools/domain/useCases/GetExternalTools.ts) implementation_. + +#### Get Dataset External Tool Resolved + +Returns an instance of [DatasetExternalToolResolved](../src/externalTools/domain/models/ExternalTool.ts), which contains the resolved URL for accessing an external tool that operates at the dataset level. + +##### Example call: + +```typescript +import { getDatasetExternalToolResolved } from '@iqss/dataverse-client-javascript' + +/* ... */ +const toolId = 1 +const datasetId = 2 +const getExternalToolDTO: GetExternalToolDTO = { + preview: true, + locale: 'en' +} + +getDatasetExternalToolResolved + .execute(toolId, datasetId, getExternalToolDTO) + .then((datasetExternalToolResolved: DatasetExternalToolResolved) => { + /* ... */ + }) +/* ... */ +``` + +_See [use case](../src/externalTools/domain/useCases/GetDatasetExternalToolResolved.ts) implementation_. + +#### Get File External Tool Resolved + +Returns an instance of [FileExternalToolResolved](../src/externalTools/domain/models/ExternalTool.ts), which contains the resolved URL for accessing an external tool that operates at the file level. + +##### Example call: + +```typescript +import { getFileExternalToolResolved } from '@iqss/dataverse-client-javascript' + +/* ... */ +const toolId = 1 +const fileId = 2 +const getExternalToolDTO: GetExternalToolDTO = { + preview: true, + locale: 'en' +} + +getFileExternalToolResolved + .execute(toolId, fileId, getExternalToolDTO) + .then((fileExternalToolResolved: FileExternalToolResolved) => { + /* ... */ + }) +/* ... */ +``` + +_See [use case](../src/externalTools/domain/useCases/GetfileExternalToolResolved.ts) implementation_. 
diff --git a/package-lock.json b/package-lock.json index fed98650..40941f67 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,17 +1,17 @@ { "name": "@iqss/dataverse-client-javascript", - "version": "2.0.0", + "version": "2.1.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@iqss/dataverse-client-javascript", - "version": "2.0.0", + "version": "2.1.0", "license": "MIT", "dependencies": { "@types/node": "^18.15.11", "@types/turndown": "^5.0.1", - "axios": "^1.7.2", + "axios": "^1.12.2", "turndown": "^7.1.2", "typescript": "^4.9.5" }, @@ -27,9 +27,9 @@ "eslint-plugin-prettier": "4.2.1", "eslint-plugin-simple-import-sort": "10.0.0", "eslint-plugin-unused-imports": "2.0.0", + "husky": "9.1.7", "jest": "^29.4.3", "jest-environment-jsdom": "29.7.0", - "pre-commit": "1.2.2", "prettier": "2.8.4", "testcontainers": "^10.11.0", "ts-jest": "^29.0.5", @@ -50,12 +50,15 @@ } }, "node_modules/@babel/code-frame": { - "version": "7.21.4", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.21.4.tgz", - "integrity": "sha512-LYvhNKfwWSPpocw8GI7gpK2nq3HSDuEPC/uSYaALSJu9xjsalaaYFOq0Pwt5KmVqwEbZlDu81aLXwBOmD/Fv9g==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", + "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/highlight": "^7.18.6" + "@babel/helper-validator-identifier": "^7.27.1", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" }, "engines": { "node": ">=6.9.0" @@ -107,24 +110,27 @@ "dev": true }, "node_modules/@babel/core/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + 
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } }, "node_modules/@babel/generator": { - "version": "7.21.5", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.21.5.tgz", - "integrity": "sha512-SrKK/sRv8GesIW1bDagf9cCG38IOMYZusoe1dfg0D8aiUe3Amvoj1QtjTPAWcfrZFvIwlleLb0gxzQidL9w14w==", + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", + "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/types": "^7.21.5", - "@jridgewell/gen-mapping": "^0.3.2", - "@jridgewell/trace-mapping": "^0.3.17", - "jsesc": "^2.5.1" + "@babel/parser": "^7.28.3", + "@babel/types": "^7.28.2", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" }, "engines": { "node": ">=6.9.0" @@ -159,10 +165,11 @@ } }, "node_modules/@babel/helper-compilation-targets/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -182,27 +189,12 @@ "node": ">=6.9.0" } }, - "node_modules/@babel/helper-function-name": { - "version": "7.21.0", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.21.0.tgz", - "integrity": "sha512-HfK1aMRanKHpxemaY2gqBmL04iAPOPRj7DxtNbiDOrJK+gdwkiNRVpCpUJYbUT+aZyemKN8brqTOxzCaG6ExRg==", + "node_modules/@babel/helper-globals": { + 
"version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", + "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", "dev": true, - "dependencies": { - "@babel/template": "^7.20.7", - "@babel/types": "^7.21.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-hoist-variables": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz", - "integrity": "sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==", - "dev": true, - "dependencies": { - "@babel/types": "^7.18.6" - }, + "license": "MIT", "engines": { "node": ">=6.9.0" } @@ -272,19 +264,21 @@ } }, "node_modules/@babel/helper-string-parser": { - "version": "7.21.5", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.21.5.tgz", - "integrity": "sha512-5pTUx3hAJaZIdW99sJ6ZUUgWq/Y+Hja7TowEnLNMm1VivRgZQL3vpBY3qUACVsvw+yQU6+YgfBVmcbLaZtrA1w==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", "dev": true, + "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.19.1", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz", - "integrity": "sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz", + "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==", "dev": true, + 
"license": "MIT", "engines": { "node": ">=6.9.0" } @@ -299,100 +293,28 @@ } }, "node_modules/@babel/helpers": { - "version": "7.21.5", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.21.5.tgz", - "integrity": "sha512-BSY+JSlHxOmGsPTydUkPf1MdMQ3M81x5xGCOVgWM3G8XH77sJ292Y2oqcp0CbbgxhqBuI46iUz1tT7hqP7EfgA==", - "dev": true, - "dependencies": { - "@babel/template": "^7.20.7", - "@babel/traverse": "^7.21.5", - "@babel/types": "^7.21.5" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/highlight": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.18.6.tgz", - "integrity": "sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==", + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.4.tgz", + "integrity": "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/helper-validator-identifier": "^7.18.6", - "chalk": "^2.0.0", - "js-tokens": "^4.0.0" + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4" }, "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/highlight/node_modules/ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dev": true, - "dependencies": { - "color-convert": "^1.9.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/highlight/node_modules/chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dev": true, - "dependencies": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" 
- }, - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/highlight/node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dev": true, - "dependencies": { - "color-name": "1.1.3" - } - }, - "node_modules/@babel/highlight/node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", - "dev": true - }, - "node_modules/@babel/highlight/node_modules/has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/highlight/node_modules/supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "node_modules/@babel/parser": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", "dev": true, + "license": "MIT", "dependencies": { - "has-flag": "^3.0.0" + "@babel/types": "^7.28.4" }, - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/parser": { - "version": "7.21.8", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.21.8.tgz", - "integrity": "sha512-6zavDGdzG3gUqAdWvlLFfk+36RilI+Pwyuuh7HItyeScCWP3k6i8vKclAQ0bM/0y/Kz/xiwvxhMv9MgTJP5gmA==", - "dev": true, "bin": { "parser": 
"bin/babel-parser.js" }, @@ -578,58 +500,48 @@ } }, "node_modules/@babel/template": { - "version": "7.20.7", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.20.7.tgz", - "integrity": "sha512-8SegXApWe6VoNw0r9JHpSteLKTpTiLZ4rMlGIm9JQ18KiCtyQiAMEazujAHrUS5flrcqYZa75ukev3P6QmUwUw==", + "version": "7.27.2", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz", + "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.18.6", - "@babel/parser": "^7.20.7", - "@babel/types": "^7.20.7" + "@babel/code-frame": "^7.27.1", + "@babel/parser": "^7.27.2", + "@babel/types": "^7.27.1" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/traverse": { - "version": "7.21.5", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.21.5.tgz", - "integrity": "sha512-AhQoI3YjWi6u/y/ntv7k48mcrCXmus0t79J9qPNlk/lAsFlCiJ047RmbfMOawySTHtywXhbXgpx/8nXMYd+oFw==", + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", + "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.21.4", - "@babel/generator": "^7.21.5", - "@babel/helper-environment-visitor": "^7.21.5", - "@babel/helper-function-name": "^7.21.0", - "@babel/helper-hoist-variables": "^7.18.6", - "@babel/helper-split-export-declaration": "^7.18.6", - "@babel/parser": "^7.21.5", - "@babel/types": "^7.21.5", - "debug": "^4.1.0", - "globals": "^11.1.0" + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.3", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.4", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4", + "debug": "^4.3.1" }, "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/traverse/node_modules/globals": { - 
"version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "dev": true, - "engines": { - "node": ">=4" - } - }, "node_modules/@babel/types": { - "version": "7.21.5", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.21.5.tgz", - "integrity": "sha512-m4AfNvVF2mVC/F7fDEdH2El3HzUg9It/XsCxZiOTTA3m3qYfcSVSbTfM6Q9xG+hYDniZssYhlXKKUMD5m8tF4Q==", + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/helper-string-parser": "^7.21.5", - "@babel/helper-validator-identifier": "^7.19.1", - "to-fast-properties": "^2.0.0" + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" }, "engines": { "node": ">=6.9.0" @@ -639,7 +551,8 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/@balena/dockerignore/-/dockerignore-1.0.2.tgz", "integrity": "sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q==", - "dev": true + "dev": true, + "license": "Apache-2.0" }, "node_modules/@bcoe/v8-coverage": { "version": "0.2.3", @@ -712,10 +625,63 @@ "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==", "dev": true, + "license": "MIT", "engines": { "node": ">=14" } }, + "node_modules/@grpc/grpc-js": { + "version": "1.14.0", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.14.0.tgz", + "integrity": "sha512-N8Jx6PaYzcTRNzirReJCtADVoq4z7+1KQ4E70jTg/koQiMoUSN1kbNjPOqpPbhMFhfU1/l7ixspPl8dNY+FoUg==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@grpc/proto-loader": 
"^0.8.0", + "@js-sdsl/ordered-map": "^4.4.2" + }, + "engines": { + "node": ">=12.10.0" + } + }, + "node_modules/@grpc/grpc-js/node_modules/@grpc/proto-loader": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.8.0.tgz", + "integrity": "sha512-rc1hOQtjIWGxcxpb9aHAfLpIctjEnsDehj0DAiVfBlmT84uvR0uUtN2hEi/ecvWVjXUGf5qPF4qEgiLOx1YIMQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "lodash.camelcase": "^4.3.0", + "long": "^5.0.0", + "protobufjs": "^7.5.3", + "yargs": "^17.7.2" + }, + "bin": { + "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@grpc/proto-loader": { + "version": "0.7.15", + "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.7.15.tgz", + "integrity": "sha512-tMXdRCfYVixjuFK+Hk0Q1s38gV9zDiDJfWL3h1rv4Qc39oILCu1TRTDt7+fGUI8K4G1Fj125Hx/ru3azECWTyQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "lodash.camelcase": "^4.3.0", + "long": "^5.0.0", + "protobufjs": "^7.2.5", + "yargs": "^17.7.2" + }, + "bin": { + "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js" + }, + "engines": { + "node": ">=6" + } + }, "node_modules/@humanwhocodes/config-array": { "version": "0.11.8", "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.8.tgz", @@ -1232,17 +1198,14 @@ } }, "node_modules/@jridgewell/gen-mapping": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz", - "integrity": "sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==", + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", "dev": true, + "license": "MIT", "dependencies": { - "@jridgewell/set-array": 
"^1.0.1", - "@jridgewell/sourcemap-codec": "^1.4.10", - "@jridgewell/trace-mapping": "^0.3.9" - }, - "engines": { - "node": ">=6.0.0" + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" } }, "node_modules/@jridgewell/resolve-uri": { @@ -1254,36 +1217,34 @@ "node": ">=6.0.0" } }, - "node_modules/@jridgewell/set-array": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz", - "integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==", - "dev": true, - "engines": { - "node": ">=6.0.0" - } - }, "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.4.15", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz", - "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==", - "dev": true + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" }, "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.18", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz", - "integrity": "sha512-w+niJYzMHdd7USdiH2U6869nqhD2nbfZXND5Yp93qIbEmnDNk7PD48o+YchRVpzMU7M6jVCbenTR7PA1FLQ9pA==", + "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", "dev": true, + "license": "MIT", "dependencies": { - "@jridgewell/resolve-uri": "3.1.0", - "@jridgewell/sourcemap-codec": "1.4.14" + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" } }, - 
"node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec": { - "version": "1.4.14", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz", - "integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==", - "dev": true + "node_modules/@js-sdsl/ordered-map": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/@js-sdsl/ordered-map/-/ordered-map-4.4.2.tgz", + "integrity": "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==", + "dev": true, + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/js-sdsl" + } }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", @@ -1330,6 +1291,80 @@ "node": ">=14" } }, + "node_modules/@protobufjs/aspromise": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", + "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/base64": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", + "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/codegen": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", + "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/eventemitter": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", + "integrity": 
"sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/fetch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", + "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@protobufjs/aspromise": "^1.1.1", + "@protobufjs/inquire": "^1.1.0" + } + }, + "node_modules/@protobufjs/float": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", + "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/inquire": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", + "integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/path": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", + "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/pool": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", + "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/utf8": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", + "integrity": 
"sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==", + "dev": true, + "license": "BSD-3-Clause" + }, "node_modules/@sinclair/typebox": { "version": "0.27.8", "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", @@ -1433,16 +1468,18 @@ "resolved": "https://registry.npmjs.org/@types/docker-modem/-/docker-modem-3.0.6.tgz", "integrity": "sha512-yKpAGEuKRSS8wwx0joknWxsmLha78wNMe9R2S3UNsVOkZded8UqOrV8KoeDXoXsjndxwyF3eIhyClGbO1SEhEg==", "dev": true, + "license": "MIT", "dependencies": { "@types/node": "*", "@types/ssh2": "*" } }, "node_modules/@types/dockerode": { - "version": "3.3.31", - "resolved": "https://registry.npmjs.org/@types/dockerode/-/dockerode-3.3.31.tgz", - "integrity": "sha512-42R9eoVqJDSvVspV89g7RwRqfNExgievLNWoHkg7NoWIqAmavIbgQBb4oc0qRtHkxE+I3Xxvqv7qVXFABKPBTg==", + "version": "3.3.44", + "resolved": "https://registry.npmjs.org/@types/dockerode/-/dockerode-3.3.44.tgz", + "integrity": "sha512-fUpIHlsbYpxAJb285xx3vp7q5wf5mjqSn3cYwl/MhiM+DB99OdO5sOCPlO0PjO+TyOtphPs7tMVLU/RtOo/JjA==", "dev": true, + "license": "MIT", "dependencies": { "@types/docker-modem": "*", "@types/node": "*", @@ -1533,10 +1570,11 @@ "dev": true }, "node_modules/@types/ssh2": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/@types/ssh2/-/ssh2-1.15.0.tgz", - "integrity": "sha512-YcT8jP5F8NzWeevWvcyrrLB3zcneVjzYY9ZDSMAMboI+2zR1qYWFhwsyOFVzT7Jorn67vqxC0FRiw8YyG9P1ww==", + "version": "1.15.5", + "resolved": "https://registry.npmjs.org/@types/ssh2/-/ssh2-1.15.5.tgz", + "integrity": "sha512-N1ASjp/nXH3ovBHddRJpli4ozpk6UdDYIX4RJWFa9L1YKnzdhTlVmiGHm4DZnj/jLbqZpes4aeR30EFGQtvhQQ==", "dev": true, + "license": "MIT", "dependencies": { "@types/node": "^18.11.18" } @@ -2267,10 +2305,11 @@ } }, "node_modules/archiver-utils/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": 
"sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0" } @@ -2390,17 +2429,6 @@ "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } }, - "node_modules/archiver/node_modules/tar-stream": { - "version": "3.1.7", - "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.1.7.tgz", - "integrity": "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==", - "dev": true, - "dependencies": { - "b4a": "^1.6.4", - "fast-fifo": "^1.2.0", - "streamx": "^2.15.0" - } - }, "node_modules/arg": { "version": "4.1.3", "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", @@ -2554,12 +2582,13 @@ } }, "node_modules/axios": { - "version": "1.7.2", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.2.tgz", - "integrity": "sha512-2A8QhOMrbomlDuiLeK9XibIBzuHeRcqqNOHp0Cyp5EoJ1IFDh+XZH3A6BkXtv0K4gFGCI0Y4BM7B1wOEi0Rmgw==", + "version": "1.12.2", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.12.2.tgz", + "integrity": "sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw==", + "license": "MIT", "dependencies": { "follow-redirects": "^1.15.6", - "form-data": "^4.0.0", + "form-data": "^4.0.4", "proxy-from-env": "^1.1.0" } }, @@ -2667,49 +2696,92 @@ "dev": true }, "node_modules/bare-events": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.4.2.tgz", - "integrity": "sha512-qMKFd2qG/36aA4GwvKq8MxnPgCQAmBWmSyLWsJcbn8v03wvIPQ/hG1Ms8bPzndZxMDoHpxez5VOS+gC9Yi24/Q==", + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.7.0.tgz", + "integrity": 
"sha512-b3N5eTW1g7vXkw+0CXh/HazGTcO5KYuu/RCNaJbDMPI6LHDi+7qe8EmxKUVe1sUbY2KZOVZFyj62x0OEz9qyAA==", "dev": true, - "optional": true + "license": "Apache-2.0" }, "node_modules/bare-fs": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/bare-fs/-/bare-fs-2.3.1.tgz", - "integrity": "sha512-W/Hfxc/6VehXlsgFtbB5B4xFcsCl+pAh30cYhoFyXErf6oGrwjh8SwiPAdHgpmWonKuYpZgGywN0SXt7dgsADA==", + "version": "4.4.5", + "resolved": "https://registry.npmjs.org/bare-fs/-/bare-fs-4.4.5.tgz", + "integrity": "sha512-TCtu93KGLu6/aiGWzMr12TmSRS6nKdfhAnzTQRbXoSWxkbb9eRd53jQ51jG7g1gYjjtto3hbBrrhzg6djcgiKg==", "dev": true, + "license": "Apache-2.0", "optional": true, "dependencies": { - "bare-events": "^2.0.0", - "bare-path": "^2.0.0", - "bare-stream": "^2.0.0" + "bare-events": "^2.5.4", + "bare-path": "^3.0.0", + "bare-stream": "^2.6.4", + "bare-url": "^2.2.2", + "fast-fifo": "^1.3.2" + }, + "engines": { + "bare": ">=1.16.0" + }, + "peerDependencies": { + "bare-buffer": "*" + }, + "peerDependenciesMeta": { + "bare-buffer": { + "optional": true + } } }, "node_modules/bare-os": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/bare-os/-/bare-os-2.4.0.tgz", - "integrity": "sha512-v8DTT08AS/G0F9xrhyLtepoo9EJBJ85FRSMbu1pQUlAf6A8T0tEEQGMVObWeqpjhSPXsE0VGlluFBJu2fdoTNg==", + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/bare-os/-/bare-os-3.6.2.tgz", + "integrity": "sha512-T+V1+1srU2qYNBmJCXZkUY5vQ0B4FSlL3QDROnKQYOqeiQR8UbjNHlPa+TIbM4cuidiN9GaTaOZgSEgsvPbh5A==", "dev": true, - "optional": true + "license": "Apache-2.0", + "optional": true, + "engines": { + "bare": ">=1.14.0" + } }, "node_modules/bare-path": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/bare-path/-/bare-path-2.1.3.tgz", - "integrity": "sha512-lh/eITfU8hrj9Ru5quUp0Io1kJWIk1bTjzo7JH1P5dWmQ2EL4hFUlfI8FonAhSlgIfhn63p84CDY/x+PisgcXA==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/bare-path/-/bare-path-3.0.0.tgz", + "integrity": 
"sha512-tyfW2cQcB5NN8Saijrhqn0Zh7AnFNsnczRcuWODH0eYAXBsJ5gVxAUuNr7tsHSC6IZ77cA0SitzT+s47kot8Mw==", "dev": true, + "license": "Apache-2.0", "optional": true, "dependencies": { - "bare-os": "^2.1.0" + "bare-os": "^3.0.1" } }, "node_modules/bare-stream": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/bare-stream/-/bare-stream-2.1.3.tgz", - "integrity": "sha512-tiDAH9H/kP+tvNO5sczyn9ZAA7utrSMobyDchsnyyXBuUe2FSQWbxhtuHB8jwpHYYevVo2UJpcmvvjrbHboUUQ==", + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/bare-stream/-/bare-stream-2.7.0.tgz", + "integrity": "sha512-oyXQNicV1y8nc2aKffH+BUHFRXmx6VrPzlnaEvMhram0nPBrKcEdcyBg5r08D0i8VxngHFAiVyn1QKXpSG0B8A==", + "dev": true, + "license": "Apache-2.0", + "optional": true, + "dependencies": { + "streamx": "^2.21.0" + }, + "peerDependencies": { + "bare-buffer": "*", + "bare-events": "*" + }, + "peerDependenciesMeta": { + "bare-buffer": { + "optional": true + }, + "bare-events": { + "optional": true + } + } + }, + "node_modules/bare-url": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/bare-url/-/bare-url-2.2.2.tgz", + "integrity": "sha512-g+ueNGKkrjMazDG3elZO1pNs3HY5+mMmOet1jtKyhOaCnkLzitxf26z7hoAEkDNgdNmnc1KIlt/dw6Po6xZMpA==", "dev": true, + "license": "Apache-2.0", "optional": true, "dependencies": { - "streamx": "^2.18.0" + "bare-path": "^3.0.0" } }, "node_modules/base64-js": { @@ -2746,6 +2818,7 @@ "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", "dev": true, + "license": "MIT", "dependencies": { "buffer": "^5.5.0", "inherits": "^2.0.4", @@ -2753,22 +2826,24 @@ } }, "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": 
"https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", "dev": true, + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", "dev": true, + "license": "MIT", "dependencies": { - "fill-range": "^7.0.1" + "fill-range": "^7.1.1" }, "engines": { "node": ">=8" @@ -2842,6 +2917,7 @@ "url": "https://feross.org/support" } ], + "license": "MIT", "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.1.13" @@ -2899,6 +2975,19 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/callsites": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", @@ -2966,7 +3055,8 @@ "version": "1.1.4", "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==", - "dev": true + "dev": true, + "license": "ISC" }, "node_modules/ci-info": { "version": "3.8.0", @@ -3110,51 +3200,6 @@ "integrity": 
"sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", "dev": true }, - "node_modules/concat-stream": { - "version": "1.6.2", - "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz", - "integrity": "sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==", - "dev": true, - "engines": [ - "node >= 0.8" - ], - "dependencies": { - "buffer-from": "^1.0.0", - "inherits": "^2.0.3", - "readable-stream": "^2.2.2", - "typedarray": "^0.0.6" - } - }, - "node_modules/concat-stream/node_modules/readable-stream": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", - "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", - "dev": true, - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "node_modules/concat-stream/node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true - }, - "node_modules/concat-stream/node_modules/string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "dependencies": { - "safe-buffer": "~5.1.0" - } - }, "node_modules/convert-source-map": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", @@ -3168,15 +3213,15 @@ "dev": true }, "node_modules/cpu-features": { - "version": "0.0.8", - "resolved": 
"https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.8.tgz", - "integrity": "sha512-BbHBvtYhUhksqTjr6bhNOjGgMnhwhGTQmOoZGD+K7BCaQDCuZl/Ve1ZxUSMRwVC4D/rkCPQ2MAIeYzrWyK7eEg==", + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz", + "integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==", "dev": true, "hasInstallScript": true, "optional": true, "dependencies": { "buildcheck": "~0.0.6", - "nan": "^2.17.0" + "nan": "^2.19.0" }, "engines": { "node": ">=10.0.0" @@ -3254,10 +3299,11 @@ "dev": true }, "node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", "dev": true, + "license": "MIT", "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", @@ -3432,44 +3478,68 @@ } }, "node_modules/docker-modem": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-3.0.8.tgz", - "integrity": "sha512-f0ReSURdM3pcKPNS30mxOHSbaFLcknGmQjwSfmbcdOw1XWKXVhukM3NJHhr7NpY9BIyyWQb0EBo3KQvvuU5egQ==", + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.6.tgz", + "integrity": "sha512-ens7BiayssQz/uAxGzH8zGXCtiV24rRWXdjNha5V4zSOcxmAZsfGVm/PPFbwQdqEkDnhG+SyR9E3zSHUbOKXBQ==", "dev": true, + "license": "Apache-2.0", "dependencies": { "debug": "^4.1.1", "readable-stream": "^3.5.0", "split-ca": "^1.0.1", - "ssh2": "^1.11.0" + "ssh2": "^1.15.0" }, "engines": { "node": ">= 8.0" } }, "node_modules/dockerode": { - "version": "3.3.5", - "resolved": 
"https://registry.npmjs.org/dockerode/-/dockerode-3.3.5.tgz", - "integrity": "sha512-/0YNa3ZDNeLr/tSckmD69+Gq+qVNhvKfAHNeZJBnp7EOP6RGKV8ORrJHkUn20So5wU+xxT7+1n5u8PjHbfjbSA==", + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.8.tgz", + "integrity": "sha512-HdPBprWmwfHMHi12AVIFDhXIqIS+EpiOVkZaAZxgML4xf5McqEZjJZtahTPkLDxWOt84ApfWPAH9EoQwOiaAIQ==", "dev": true, + "license": "Apache-2.0", "dependencies": { "@balena/dockerignore": "^1.0.2", - "docker-modem": "^3.0.0", - "tar-fs": "~2.0.1" + "@grpc/grpc-js": "^1.11.1", + "@grpc/proto-loader": "^0.7.13", + "docker-modem": "^5.0.6", + "protobufjs": "^7.3.2", + "tar-fs": "~2.1.3", + "uuid": "^10.0.0" }, "engines": { "node": ">= 8.0" } }, "node_modules/dockerode/node_modules/tar-fs": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.0.1.tgz", - "integrity": "sha512-6tzWDMeroL87uF/+lin46k+Q+46rAJ0SyPGz7OW7wTgblI273hsBqk2C1j0/xNadNLKDTUL9BukSjB7cwgmlPA==", + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.4.tgz", + "integrity": "sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ==", "dev": true, + "license": "MIT", "dependencies": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", "pump": "^3.0.0", - "tar-stream": "^2.0.0" + "tar-stream": "^2.1.4" + } + }, + "node_modules/dockerode/node_modules/tar-stream": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz", + "integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "bl": "^4.0.3", + "end-of-stream": "^1.4.1", + "fs-constants": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1" + }, + "engines": { + "node": ">=6" } }, "node_modules/doctrine": { @@ -3502,6 +3572,20 @@ "resolved": "https://registry.npmjs.org/domino/-/domino-2.1.6.tgz", "integrity": 
"sha512-3VdM/SXBZX2omc9JF9nOPCtDaYQ67BGp5CoLpIQlO2KCAPETs8TcDHacF26jXadGbvUteZzRTeos2fhID5+ucQ==" }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/eastasianwidth": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", @@ -3533,10 +3617,11 @@ "dev": true }, "node_modules/end-of-stream": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", - "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz", + "integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==", "dev": true, + "license": "MIT", "dependencies": { "once": "^1.4.0" } @@ -3615,24 +3700,45 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, "node_modules/es-errors": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", - "dev": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + 
"resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, "engines": { "node": ">= 0.4" } }, "node_modules/es-set-tostringtag": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.2.tgz", - "integrity": "sha512-BuDyupZt65P9D2D2vA/zqcI3G5xRsklm5N3xCwuiy+/vKy8i0ifdsQP1sLgO4tZDSCaQUSnmC48khknGMV3D2Q==", - "dev": true, + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "license": "MIT", "dependencies": { - "get-intrinsic": "^1.2.2", - "has-tostringtag": "^1.0.0", - "hasown": "^2.0.0" + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" }, "engines": { "node": ">= 0.4" @@ -3673,15 +3779,6 @@ "node": ">=6" } }, - "node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", - "dev": true, - "engines": { - "node": ">=0.8.0" - } - }, "node_modules/escodegen": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-2.1.0.tgz", @@ -4178,6 +4275,16 @@ "node": ">=0.8.x" } }, + "node_modules/events-universal": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/events-universal/-/events-universal-1.0.1.tgz", + "integrity": "sha512-LUd5euvbMLpwOF8m6ivPCbhQeSiYVNb8Vs0fQ8QjXo0JTkEHpz8pxdQf0gStltaPpw0Cca8b39KxvK9cfKRiAw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "bare-events": "^2.7.0" + } + }, "node_modules/execa": 
{ "version": "5.1.1", "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", @@ -4303,10 +4410,11 @@ } }, "node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", "dev": true, + "license": "MIT", "dependencies": { "to-regex-range": "^5.0.1" }, @@ -4406,12 +4514,15 @@ } }, "node_modules/form-data": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", - "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", + "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", + "license": "MIT", "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", "mime-types": "^2.1.12" }, "engines": { @@ -4422,7 +4533,8 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/fs.realpath": { "version": "1.0.0", @@ -4448,7 +4560,6 @@ "version": "1.1.2", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", - "dev": true, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -4499,16 +4610,21 @@ } }, 
"node_modules/get-intrinsic": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", - "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", - "dev": true, + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "license": "MIT", "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", "function-bind": "^1.1.2", - "has-proto": "^1.0.1", - "has-symbols": "^1.0.3", - "hasown": "^2.0.0" + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" }, "engines": { "node": ">= 0.4" @@ -4527,17 +4643,31 @@ } }, "node_modules/get-port": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/get-port/-/get-port-5.1.1.tgz", - "integrity": "sha512-g/Q1aTSDOxFpchXC4i8ZWvxA1lnPqx/JHqcpIw0/LX9T8x/GBbi6YnlN5nhaKIFkT8oFsscUKgDJYxfwfS6QsQ==", + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/get-port/-/get-port-7.1.0.tgz", + "integrity": "sha512-QB9NKEeDg3xxVwCCwJQ9+xycaz6pBB6iQ76wiWMl1927n0Kir6alPiP+yuiICLLU4jpMe08dXfpebuQppFA2zw==", "dev": true, + "license": "MIT", "engines": { - "node": ">=8" + "node": ">=16" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/get-stream": { "version": "6.0.1", "resolved": 
"https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", @@ -4649,12 +4779,12 @@ } }, "node_modules/gopd": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", - "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", - "dev": true, - "dependencies": { - "get-intrinsic": "^1.1.3" + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -4727,10 +4857,10 @@ } }, "node_modules/has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", - "dev": true, + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", "engines": { "node": ">= 0.4" }, @@ -4742,7 +4872,6 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", - "dev": true, "dependencies": { "has-symbols": "^1.0.3" }, @@ -4754,10 +4883,10 @@ } }, "node_modules/hasown": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.0.tgz", - "integrity": "sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==", - "dev": true, + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": 
"sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", "dependencies": { "function-bind": "^1.1.2" }, @@ -4819,6 +4948,22 @@ "node": ">=10.17.0" } }, + "node_modules/husky": { + "version": "9.1.7", + "resolved": "https://registry.npmjs.org/husky/-/husky-9.1.7.tgz", + "integrity": "sha512-5gs5ytaNjBrh5Ow3zrvdUUY+0VxIuWVL4i9irt6friV+BqdCfmV11CQTWMiBYWHbXhco+J1kHfTOUkePhCDvMA==", + "dev": true, + "license": "MIT", + "bin": { + "husky": "bin.js" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/typicode" + } + }, "node_modules/iconv-lite": { "version": "0.6.3", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", @@ -5110,6 +5255,7 @@ "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.12.0" } @@ -5279,10 +5425,11 @@ } }, "node_modules/istanbul-lib-instrument/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -5942,7 +6089,8 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/js-yaml": { "version": "4.1.0", @@ -6002,15 +6150,16 @@ } }, "node_modules/jsesc": { - "version": "2.5.2", - 
"resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", - "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", "dev": true, + "license": "MIT", "bin": { "jsesc": "bin/jsesc" }, "engines": { - "node": ">=4" + "node": ">=6" } }, "node_modules/json-parse-even-better-errors": { @@ -6143,6 +6292,13 @@ "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", "dev": true }, + "node_modules/lodash.camelcase": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", + "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==", + "dev": true, + "license": "MIT" + }, "node_modules/lodash.memoize": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", @@ -6155,17 +6311,12 @@ "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", "dev": true }, - "node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "node_modules/long": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/long/-/long-5.3.2.tgz", + "integrity": "sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==", "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } + "license": "Apache-2.0" }, "node_modules/make-dir": { "version": "3.1.0", @@ -6183,10 +6334,11 @@ } }, 
"node_modules/make-dir/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -6206,6 +6358,15 @@ "tmpl": "1.0.5" } }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, "node_modules/merge-stream": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", @@ -6222,12 +6383,13 @@ } }, "node_modules/micromatch": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", - "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", "dev": true, + "license": "MIT", "dependencies": { - "braces": "^3.0.2", + "braces": "^3.0.3", "picomatch": "^2.3.1" }, "engines": { @@ -6308,7 +6470,8 @@ "version": "0.5.3", "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/ms": { "version": "2.1.2", @@ -6317,10 +6480,11 @@ 
"dev": true }, "node_modules/nan": { - "version": "2.17.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.17.0.tgz", - "integrity": "sha512-2ZTgtl0nJsO0KQCjEpxcIr5D+Yv90plTitZt9JBfQvVJDS5seMl3FOvsh3+9CoYWXf/1l5OaZzzF6nDm4cagaQ==", + "version": "2.23.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.23.0.tgz", + "integrity": "sha512-1UxuyYGdoQHcGg87Lkqm3FzefucTa0NAiOcuRsDmysep3c1LVCRK2krrUDafMWtjSG04htvAmvg96+SDknOmgQ==", "dev": true, + "license": "MIT", "optional": true }, "node_modules/natural-compare": { @@ -6468,15 +6632,6 @@ "node": ">= 0.8.0" } }, - "node_modules/os-shim": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/os-shim/-/os-shim-0.1.3.tgz", - "integrity": "sha512-jd0cvB8qQ5uVt0lvCIexBaROw1KyKm5sbulg2fWOHjETisuCzWyt+eTZKEMs8v6HwzoGs8xik26jg7eCM6pS+A==", - "dev": true, - "engines": { - "node": ">= 0.4.0" - } - }, "node_modules/p-limit": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", @@ -6629,10 +6784,11 @@ } }, "node_modules/picocolors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", - "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", - "dev": true + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" }, "node_modules/picomatch": { "version": "2.3.1", @@ -6719,78 +6875,6 @@ "node": ">=8" } }, - "node_modules/pre-commit": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/pre-commit/-/pre-commit-1.2.2.tgz", - "integrity": "sha512-qokTiqxD6GjODy5ETAIgzsRgnBWWQHQH2ghy86PU7mIn/wuWeTwF3otyNQZxWBwVn8XNr8Tdzj/QfUXpH+gRZA==", - "dev": true, - "hasInstallScript": true, - "dependencies": { - "cross-spawn": "^5.0.1", - "spawn-sync": "^1.0.15", - "which": "1.2.x" - } 
- }, - "node_modules/pre-commit/node_modules/cross-spawn": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz", - "integrity": "sha512-pTgQJ5KC0d2hcY8eyL1IzlBPYjTkyH72XRZPnLyKus2mBfNjQs3klqbJU2VILqZryAZUt9JOb3h/mWMy23/f5A==", - "dev": true, - "dependencies": { - "lru-cache": "^4.0.1", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - } - }, - "node_modules/pre-commit/node_modules/lru-cache": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", - "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", - "dev": true, - "dependencies": { - "pseudomap": "^1.0.2", - "yallist": "^2.1.2" - } - }, - "node_modules/pre-commit/node_modules/shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", - "dev": true, - "dependencies": { - "shebang-regex": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/pre-commit/node_modules/shebang-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/pre-commit/node_modules/which": { - "version": "1.2.14", - "resolved": "https://registry.npmjs.org/which/-/which-1.2.14.tgz", - "integrity": "sha512-16uPglFkRPzgiUXYMi1Jf8Z5EzN1iB4V0ZtMXcHZnwsBtQhhHeCqoWw7tsUY42hJGNDWtUsVLTjakIa5BgAxCw==", - "dev": true, - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "which": "bin/which" - } - }, - "node_modules/pre-commit/node_modules/yallist": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", - 
"integrity": "sha512-ncTzHV7NvsQZkYe1DW7cbDLm0YpzHmZF5r/iyP3ZnQtMiJ+pjzisCiMNI+Sj+xQF5pXhSHxSB3uDbsBTzY/c2A==", - "dev": true - }, "node_modules/prelude-ls": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", @@ -6908,17 +6992,36 @@ "url": "https://github.com/steveukx/properties?sponsor=1" } }, + "node_modules/protobufjs": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.5.4.tgz", + "integrity": "sha512-CvexbZtbov6jW2eXAvLukXjXUW1TzFaivC46BpWc/3BpcCysb5Vffu+B3XHMm8lVEuy2Mm4XGex8hBSg1yapPg==", + "dev": true, + "hasInstallScript": true, + "license": "BSD-3-Clause", + "dependencies": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/node": ">=13.7.0", + "long": "^5.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, "node_modules/proxy-from-env": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==" }, - "node_modules/pseudomap": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", - "integrity": "sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ==", - "dev": true - }, "node_modules/psl": { "version": "1.13.0", "resolved": "https://registry.npmjs.org/psl/-/psl-1.13.0.tgz", @@ -6929,10 +7032,11 @@ } }, "node_modules/pump": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", - "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + 
"version": "3.0.3", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.3.tgz", + "integrity": "sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA==", "dev": true, + "license": "MIT", "dependencies": { "end-of-stream": "^1.1.0", "once": "^1.3.1" @@ -6989,12 +7093,6 @@ } ] }, - "node_modules/queue-tick": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/queue-tick/-/queue-tick-1.0.1.tgz", - "integrity": "sha512-kJt5qhMxoszgU/62PLP1CJytzd2NKetjSRnyuj31fDd3Rlcz3fzlFdFLD1SItunPwyqEOkca6GbV612BWfaBag==", - "dev": true - }, "node_modules/react-is": { "version": "18.2.0", "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.2.0.tgz", @@ -7006,6 +7104,7 @@ "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", "dev": true, + "license": "MIT", "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", @@ -7025,10 +7124,11 @@ } }, "node_modules/readdir-glob/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0" } @@ -7282,13 +7382,11 @@ } }, "node_modules/semver": { - "version": "7.5.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.1.tgz", - "integrity": "sha512-Wvss5ivl8TMRZXXESstBA4uR5iXgEN/VC5/sOcuXdVLzcdkz4HWetIoRfG5gb5X+ij/G9rw9YoGn3QoQ8OCSpw==", + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", 
+ "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, + "license": "ISC", "bin": { "semver": "bin/semver.js" }, @@ -7406,22 +7504,12 @@ "source-map": "^0.6.0" } }, - "node_modules/spawn-sync": { - "version": "1.0.15", - "resolved": "https://registry.npmjs.org/spawn-sync/-/spawn-sync-1.0.15.tgz", - "integrity": "sha512-9DWBgrgYZzNghseho0JOuh+5fg9u6QWhAWa51QC7+U5rCheZ/j1DrEZnyE0RBBRqZ9uEXGPgSSM0nky6burpVw==", - "dev": true, - "hasInstallScript": true, - "dependencies": { - "concat-stream": "^1.4.7", - "os-shim": "^0.1.2" - } - }, "node_modules/split-ca": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/split-ca/-/split-ca-1.0.1.tgz", "integrity": "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==", - "dev": true + "dev": true, + "license": "ISC" }, "node_modules/sprintf-js": { "version": "1.0.3", @@ -7450,9 +7538,9 @@ } }, "node_modules/ssh2": { - "version": "1.14.0", - "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.14.0.tgz", - "integrity": "sha512-AqzD1UCqit8tbOKoj6ztDDi1ffJZ2rV2SwlgrVVrHPkV5vWqGJOVp5pmtj18PunkPJAuKQsnInyKV+/Nb2bUnA==", + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.17.0.tgz", + "integrity": "sha512-wPldCk3asibAjQ/kziWQQt1Wh3PgDFpC0XpwclzKcdT1vql6KeYxf5LIt4nlFkUeR8WuphYMKqUA56X4rjbfgQ==", "dev": true, "hasInstallScript": true, "dependencies": { @@ -7463,8 +7551,8 @@ "node": ">=10.16.0" }, "optionalDependencies": { - "cpu-features": "~0.0.8", - "nan": "^2.17.0" + "cpu-features": "~0.0.10", + "nan": "^2.23.0" } }, "node_modules/stack-utils": { @@ -7489,17 +7577,15 @@ } }, "node_modules/streamx": { - "version": "2.18.0", - "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.18.0.tgz", - "integrity": "sha512-LLUC1TWdjVdn1weXGcSxyTR3T4+acB6tVGXT95y0nGbca4t4o/ng1wKAGTljm9VicuCVLvRlqFYXYy5GwgM7sQ==", + "version": "2.23.0", + 
"resolved": "https://registry.npmjs.org/streamx/-/streamx-2.23.0.tgz", + "integrity": "sha512-kn+e44esVfn2Fa/O0CPFcex27fjIL6MkVae0Mm6q+E6f0hWv578YCERbv+4m02cjxvDsPKLnmxral/rR6lBMAg==", "dev": true, + "license": "MIT", "dependencies": { + "events-universal": "^1.0.0", "fast-fifo": "^1.3.2", - "queue-tick": "^1.0.1", "text-decoder": "^1.1.0" - }, - "optionalDependencies": { - "bare-events": "^2.2.0" } }, "node_modules/string_decoder": { @@ -7684,46 +7770,32 @@ "dev": true }, "node_modules/tar-fs": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.0.6.tgz", - "integrity": "sha512-iokBDQQkUyeXhgPYaZxmczGPhnhXZ0CmrqI+MOb/WFGS9DW5wnfrLgtjUJBvz50vQ3qfRwJ62QVoCFu8mPVu5w==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.1.1.tgz", + "integrity": "sha512-LZA0oaPOc2fVo82Txf3gw+AkEd38szODlptMYejQUhndHMLQ9M059uXR+AfS7DNo0NpINvSqDsvyaCrBVkptWg==", "dev": true, + "license": "MIT", "dependencies": { "pump": "^3.0.0", "tar-stream": "^3.1.5" }, "optionalDependencies": { - "bare-fs": "^2.1.1", - "bare-path": "^2.1.0" + "bare-fs": "^4.0.1", + "bare-path": "^3.0.0" } }, - "node_modules/tar-fs/node_modules/tar-stream": { + "node_modules/tar-stream": { "version": "3.1.7", "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.1.7.tgz", "integrity": "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==", "dev": true, + "license": "MIT", "dependencies": { "b4a": "^1.6.4", "fast-fifo": "^1.2.0", "streamx": "^2.15.0" } }, - "node_modules/tar-stream": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz", - "integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==", - "dev": true, - "dependencies": { - "bl": "^4.0.3", - "end-of-stream": "^1.4.1", - "fs-constants": "^1.0.0", - "inherits": "^2.0.3", - "readable-stream": "^3.1.1" - }, - "engines": { - "node": ">=6" - } - }, 
"node_modules/test-exclude": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", @@ -7739,26 +7811,27 @@ } }, "node_modules/testcontainers": { - "version": "10.11.0", - "resolved": "https://registry.npmjs.org/testcontainers/-/testcontainers-10.11.0.tgz", - "integrity": "sha512-TYgpR+MjZSuX7kSUxTa0f/CsN6eErbMFrAFumW08IvOnU8b+EoRzpzEu7mF0d29M1ItnHfHPUP44HYiE4yP3Zg==", + "version": "10.28.0", + "resolved": "https://registry.npmjs.org/testcontainers/-/testcontainers-10.28.0.tgz", + "integrity": "sha512-1fKrRRCsgAQNkarjHCMKzBKXSJFmzNTiTbhb5E/j5hflRXChEtHvkefjaHlgkNUjfw92/Dq8LTgwQn6RDBFbMg==", "dev": true, + "license": "MIT", "dependencies": { "@balena/dockerignore": "^1.0.2", - "@types/dockerode": "^3.3.29", + "@types/dockerode": "^3.3.35", "archiver": "^7.0.1", "async-lock": "^1.4.1", "byline": "^5.0.0", "debug": "^4.3.5", "docker-compose": "^0.24.8", - "dockerode": "^3.3.5", - "get-port": "^5.1.1", + "dockerode": "^4.0.5", + "get-port": "^7.1.0", "proper-lockfile": "^4.1.2", "properties-reader": "^2.3.0", "ssh-remote-port-forward": "^1.0.4", - "tar-fs": "^3.0.6", + "tar-fs": "^3.0.7", "tmp": "^0.2.3", - "undici": "^5.28.4" + "undici": "^5.29.0" } }, "node_modules/text-decoder": { @@ -7777,10 +7850,11 @@ "dev": true }, "node_modules/tmp": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.3.tgz", - "integrity": "sha512-nZD7m9iCPC5g0pYmcaxogYKggSfLsdxl8of3Q/oIbqCqLLIO9IAF0GWjX1z9NZRHPiXv8Wex4yDCaZsgEw0Y8w==", + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.5.tgz", + "integrity": "sha512-voyz6MApa1rQGUxT3E+BK7/ROe8itEx7vD8/HEvt4xwXucvQ5G5oeEiHkmHZJuBO21RpOf+YYm9MOivj709jow==", "dev": true, + "license": "MIT", "engines": { "node": ">=14.14" } @@ -7791,20 +7865,12 @@ "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==", "dev": true }, - "node_modules/to-fast-properties": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", - "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", - "dev": true, - "engines": { - "node": ">=4" - } - }, "node_modules/to-regex-range": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", "dev": true, + "license": "MIT", "dependencies": { "is-number": "^7.0.0" }, @@ -8100,12 +8166,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/typedarray": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", - "integrity": "sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==", - "dev": true - }, "node_modules/typescript": { "version": "4.9.5", "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz", @@ -8134,10 +8194,11 @@ } }, "node_modules/undici": { - "version": "5.28.4", - "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.4.tgz", - "integrity": "sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==", + "version": "5.29.0", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.29.0.tgz", + "integrity": "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==", "dev": true, + "license": "MIT", "dependencies": { "@fastify/busboy": "^2.0.0" }, @@ -8222,6 +8283,20 @@ "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", "dev": true }, + "node_modules/uuid": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", + "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", + "dev": true, + "funding": [ + 
"https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, "node_modules/v8-compile-cache-lib": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", @@ -8384,10 +8459,11 @@ } }, "node_modules/word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } @@ -8491,12 +8567,6 @@ "node": ">=10" } }, - "node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, "node_modules/yaml": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.5.0.tgz", diff --git a/package.json b/package.json index bcfedaf3..541fc681 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@iqss/dataverse-client-javascript", - "version": "2.0.0", + "version": "2.1.0", "description": "Dataverse API wrapper package for JavaScript/TypeScript-based applications", "main": "./dist/index.js", "types": "./dist/index.d.ts", @@ -21,14 +21,8 @@ "lint:prettier": "prettier --check '**/*.(yml|json|md)'", "format": "prettier --write './**/*.{js,ts,md,json,yml,md}' --config ./.prettierrc", "typecheck": "tsc --noEmit", - "git:add": "git add ." 
+ "prepare": "husky" }, - "pre-commit": [ - "format", - "typecheck", - "lint:fix", - "git:add" - ], "repository": { "type": "git", "url": "git+https://github.com/IQSS/dataverse-client-javascript.git" @@ -54,9 +48,9 @@ "eslint-plugin-prettier": "4.2.1", "eslint-plugin-simple-import-sort": "10.0.0", "eslint-plugin-unused-imports": "2.0.0", + "husky": "9.1.7", "jest": "^29.4.3", "jest-environment-jsdom": "29.7.0", - "pre-commit": "1.2.2", "prettier": "2.8.4", "testcontainers": "^10.11.0", "ts-jest": "^29.0.5", @@ -65,7 +59,7 @@ "dependencies": { "@types/node": "^18.15.11", "@types/turndown": "^5.0.1", - "axios": "^1.7.2", + "axios": "^1.12.2", "turndown": "^7.1.2", "typescript": "^4.9.5" } diff --git a/src/collections/domain/models/CollectionLinks.ts b/src/collections/domain/models/CollectionLinks.ts new file mode 100644 index 00000000..40b7799b --- /dev/null +++ b/src/collections/domain/models/CollectionLinks.ts @@ -0,0 +1,8 @@ +import { CollectionSummary } from './CollectionSummary' +import { DatasetSummary } from '../../../datasets/domain/models/DatasetSummary' + +export interface CollectionLinks { + linkedCollections: CollectionSummary[] + collectionsLinkingToThis: CollectionSummary[] + linkedDatasets: DatasetSummary[] +} diff --git a/src/collections/domain/models/CollectionSummary.ts b/src/collections/domain/models/CollectionSummary.ts new file mode 100644 index 00000000..bb4ee24d --- /dev/null +++ b/src/collections/domain/models/CollectionSummary.ts @@ -0,0 +1,5 @@ +export interface CollectionSummary { + id: number + alias: string + displayName: string +} diff --git a/src/collections/domain/repositories/ICollectionsRepository.ts b/src/collections/domain/repositories/ICollectionsRepository.ts index 6c78923e..820a1356 100644 --- a/src/collections/domain/repositories/ICollectionsRepository.ts +++ b/src/collections/domain/repositories/ICollectionsRepository.ts @@ -9,6 +9,7 @@ import { CollectionSearchCriteria } from '../models/CollectionSearchCriteria' import { 
CollectionUserPermissions } from '../models/CollectionUserPermissions' import { PublicationStatus } from '../../../core/domain/models/PublicationStatus' import { CollectionItemType } from '../../../collections/domain/models/CollectionItemType' +import { CollectionLinks } from '../models/CollectionLinks' export interface ICollectionsRepository { getCollection(collectionIdOrAlias: number | string): Promise @@ -27,6 +28,7 @@ export interface ICollectionsRepository { limit?: number, offset?: number, collectionSearchCriteria?: CollectionSearchCriteria, + searchServiceName?: string, showTypeCounts?: boolean ): Promise getMyDataCollectionItems( @@ -49,4 +51,13 @@ export interface ICollectionsRepository { ): Promise deleteCollectionFeaturedItems(collectionIdOrAlias: number | string): Promise deleteCollectionFeaturedItem(featuredItemId: number): Promise + linkCollection( + linkedCollectionIdOrAlias: number | string, + linkingCollectionIdOrAlias: number | string + ): Promise + unlinkCollection( + linkedCollectionIdOrAlias: number | string, + linkingCollectionIdOrAlias: number | string + ): Promise + getCollectionLinks(collectionIdOrAlias: number | string): Promise } diff --git a/src/collections/domain/useCases/GetCollectionItems.ts b/src/collections/domain/useCases/GetCollectionItems.ts index 1c85b202..2d8cd10b 100644 --- a/src/collections/domain/useCases/GetCollectionItems.ts +++ b/src/collections/domain/useCases/GetCollectionItems.ts @@ -18,6 +18,7 @@ export class GetCollectionItems implements UseCase { * @param {number} [limit] - Limit for pagination (optional). * @param {number} [offset] - Offset for pagination (optional). * @param {CollectionSearchCriteria} [collectionSearchCriteria] - Supports filtering the collection items by different properties (optional). + * @param {string} [searchServiceName] - The search service name on which to execute the search (optional). 
* @param {boolean} [showTypeCounts] - If true, the response will include the count per object type (optional). * @returns {Promise} */ @@ -26,6 +27,7 @@ export class GetCollectionItems implements UseCase { limit?: number, offset?: number, collectionSearchCriteria?: CollectionSearchCriteria, + searchServiceName?: string, showTypeCounts = false ): Promise { return await this.collectionsRepository.getCollectionItems( @@ -33,6 +35,7 @@ export class GetCollectionItems implements UseCase { limit, offset, collectionSearchCriteria, + searchServiceName, showTypeCounts ) } diff --git a/src/collections/domain/useCases/GetCollectionLinks.ts b/src/collections/domain/useCases/GetCollectionLinks.ts new file mode 100644 index 00000000..fa0b6c92 --- /dev/null +++ b/src/collections/domain/useCases/GetCollectionLinks.ts @@ -0,0 +1,22 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase' +import { ICollectionsRepository } from '../repositories/ICollectionsRepository' +import { CollectionLinks } from '../models/CollectionLinks' + +export class GetCollectionLinks implements UseCase { + private collectionsRepository: ICollectionsRepository + + constructor(collectionsRepository: ICollectionsRepository) { + this.collectionsRepository = collectionsRepository + } + + /** + * Returns a CollectionLinks object containing other collections this collection is linked to, the other collections linking to this collection, and datasets linked to this collection, given the collection identifier or alias. 
+ * + * @param {number | string} [collectionIdOrAlias] - A generic collection identifier, which can be either a string (for queries by CollectionAlias), or a number (for queries by CollectionId) + * If this parameter is not set, the default value is: ':root' + * @returns {Promise} + */ + async execute(collectionId: number | string): Promise { + return await this.collectionsRepository.getCollectionLinks(collectionId) + } +} diff --git a/src/collections/domain/useCases/LinkCollection.ts b/src/collections/domain/useCases/LinkCollection.ts new file mode 100644 index 00000000..4ea0b2b4 --- /dev/null +++ b/src/collections/domain/useCases/LinkCollection.ts @@ -0,0 +1,27 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase' +import { ICollectionsRepository } from '../repositories/ICollectionsRepository' + +export class LinkCollection implements UseCase { + private collectionsRepository: ICollectionsRepository + + constructor(collectionsRepository: ICollectionsRepository) { + this.collectionsRepository = collectionsRepository + } + + /** + * Creates a link between two collections. The linked collection will be linked to the linking collection.: + * + * @param {number| string} [linkedCollectionIdOrAlias] - The collection to be linked. Can be either a string (collection alias), or a number (collection id) + * @param { number | string} [linkingCollectionIdOrAlias] - The collection that will be linking to the linked collection. Can be either a string (collection alias), or a number (collection id) + * @returns {Promise} -This method does not return anything upon successful completion. 
+ */ + async execute( + linkedCollectionIdOrAlias: number | string, + linkingCollectionIdOrAlias: number | string + ): Promise { + return await this.collectionsRepository.linkCollection( + linkedCollectionIdOrAlias, + linkingCollectionIdOrAlias + ) + } +} diff --git a/src/collections/domain/useCases/UnlinkCollection.ts b/src/collections/domain/useCases/UnlinkCollection.ts new file mode 100644 index 00000000..2f4d4e3e --- /dev/null +++ b/src/collections/domain/useCases/UnlinkCollection.ts @@ -0,0 +1,27 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase' +import { ICollectionsRepository } from '../repositories/ICollectionsRepository' + +export class UnlinkCollection implements UseCase { + private collectionsRepository: ICollectionsRepository + + constructor(collectionsRepository: ICollectionsRepository) { + this.collectionsRepository = collectionsRepository + } + + /** + * Unlinks a collection from the collection that links to it + * + * @param {number| string} [linkedCollectionIdOrAlias] - The collection that is linked. Can be either a string (collection alias), or a number (collection id) + * @param { number | string} [linkingCollectionIdOrAlias] - The collection that links to the linked collection. Can be either a string (collection alias), or a number (collection id) + * @returns {Promise} -This method does not return anything upon successful completion. 
+ */ + async execute( + linkedCollectionIdOrAlias: number | string, + linkingCollectionIdOrAlias: number | string + ): Promise { + return await this.collectionsRepository.unlinkCollection( + linkedCollectionIdOrAlias, + linkingCollectionIdOrAlias + ) + } +} diff --git a/src/collections/index.ts b/src/collections/index.ts index c275402e..05e49954 100644 --- a/src/collections/index.ts +++ b/src/collections/index.ts @@ -12,6 +12,9 @@ import { DeleteCollectionFeaturedItems } from './domain/useCases/DeleteCollectio import { DeleteCollection } from './domain/useCases/DeleteCollection' import { GetMyDataCollectionItems } from './domain/useCases/GetMyDataCollectionItems' import { DeleteCollectionFeaturedItem } from './domain/useCases/DeleteCollectionFeaturedItem' +import { LinkCollection } from './domain/useCases/LinkCollection' +import { UnlinkCollection } from './domain/useCases/UnlinkCollection' +import { GetCollectionLinks } from './domain/useCases/GetCollectionLinks' const collectionsRepository = new CollectionsRepository() @@ -28,6 +31,9 @@ const updateCollectionFeaturedItems = new UpdateCollectionFeaturedItems(collecti const deleteCollectionFeaturedItems = new DeleteCollectionFeaturedItems(collectionsRepository) const deleteCollection = new DeleteCollection(collectionsRepository) const deleteCollectionFeaturedItem = new DeleteCollectionFeaturedItem(collectionsRepository) +const linkCollection = new LinkCollection(collectionsRepository) +const unlinkCollection = new UnlinkCollection(collectionsRepository) +const getCollectionLinks = new GetCollectionLinks(collectionsRepository) export { getCollection, @@ -42,7 +48,10 @@ export { updateCollectionFeaturedItems, deleteCollectionFeaturedItems, deleteCollection, - deleteCollectionFeaturedItem + deleteCollectionFeaturedItem, + linkCollection, + unlinkCollection, + getCollectionLinks } export { Collection, CollectionInputLevel } from './domain/models/Collection' export { CollectionFacet } from 
'./domain/models/CollectionFacet' diff --git a/src/collections/infra/repositories/CollectionsRepository.ts b/src/collections/infra/repositories/CollectionsRepository.ts index fa0f9872..704367e2 100644 --- a/src/collections/infra/repositories/CollectionsRepository.ts +++ b/src/collections/infra/repositories/CollectionsRepository.ts @@ -3,6 +3,7 @@ import { ICollectionsRepository } from '../../domain/repositories/ICollectionsRe import { transformCollectionFacetsResponseToCollectionFacets, transformCollectionItemsResponseToCollectionItemSubset, + transformCollectionLinksResponseToCollectionLinks, transformCollectionResponseToCollection, transformMyDataResponseToCollectionItemSubset } from './transformers/collectionTransformers' @@ -36,6 +37,7 @@ import { import { ApiConstants } from '../../../core/infra/repositories/ApiConstants' import { PublicationStatus } from '../../../core/domain/models/PublicationStatus' import { ReadError } from '../../../core/domain/repositories/ReadError' +import { CollectionLinks } from '../../domain/models/CollectionLinks' export interface NewCollectionRequestPayload { alias: string @@ -75,7 +77,8 @@ export enum GetCollectionItemsQueryParams { START = 'start', TYPE = 'type', FILTERQUERY = 'fq', - SHOW_TYPE_COUNTS = 'show_type_counts' + SHOW_TYPE_COUNTS = 'show_type_counts', + SEARCH_SERVICE_NAME = 'search_service' } export enum GetMyDataCollectionItemsQueryParams { @@ -168,6 +171,7 @@ export class CollectionsRepository extends ApiRepository implements ICollections limit?: number, offset?: number, collectionSearchCriteria?: CollectionSearchCriteria, + searchServiceName?: string, showTypeCounts?: boolean ): Promise { const queryParams = new URLSearchParams({ @@ -193,6 +197,10 @@ export class CollectionsRepository extends ApiRepository implements ICollections queryParams.set(GetCollectionItemsQueryParams.SHOW_TYPE_COUNTS, 'true') } + if (searchServiceName) { + queryParams.set(GetCollectionItemsQueryParams.SEARCH_SERVICE_NAME, 
searchServiceName) + } + if (collectionSearchCriteria) { this.applyCollectionSearchCriteriaToQueryParams(queryParams, collectionSearchCriteria) } @@ -349,10 +357,13 @@ export class CollectionsRepository extends ApiRepository implements ICollections if (collectionSearchCriteria?.filterQueries) { collectionSearchCriteria.filterQueries.forEach((filterQuery) => { - const [filterQueryKey, filterQueryValue] = filterQuery.split(':') + const idx = filterQuery.indexOf(':') + if (idx === -1) return // Invalid filter query, skip it - const filterQueryValueWithQuotes = `"${filterQueryValue}"` + const filterQueryKey = filterQuery.substring(0, idx).trim() + const filterQueryValue = filterQuery.substring(idx + 1).trim() + const filterQueryValueWithQuotes = `"${filterQueryValue}"` const filterQueryToSet = `${filterQueryKey}:${filterQueryValueWithQuotes}` queryParams.append(GetCollectionItemsQueryParams.FILTERQUERY, filterQueryToSet) @@ -440,4 +451,38 @@ export class CollectionsRepository extends ApiRepository implements ICollections throw error }) } + public async linkCollection( + linkedCollectionIdOrAlias: number | string, + linkingCollectionIdOrAlias: number | string + ): Promise { + return this.doPut( + `/dataverses/${linkedCollectionIdOrAlias}/link/${linkingCollectionIdOrAlias}`, + {} // No data is needed for this operation + ) + .then(() => undefined) + .catch((error) => { + throw error + }) + } + public async unlinkCollection( + linkedCollectionIdOrAlias: number | string, + linkingCollectionIdOrAlias: number | string + ): Promise { + return this.doDelete( + `/dataverses/${linkedCollectionIdOrAlias}/deleteLink/${linkingCollectionIdOrAlias}` + ) + .then(() => undefined) + .catch((error) => { + throw error + }) + } + public async getCollectionLinks(collectionIdOrAlias: number | string): Promise { + return this.doGet(`/${this.collectionsResourceName}/${collectionIdOrAlias}/links`, true) + .then((response) => { + return transformCollectionLinksResponseToCollectionLinks(response) 
+ }) + .catch((error) => { + throw error + }) + } } diff --git a/src/collections/infra/repositories/transformers/collectionTransformers.ts b/src/collections/infra/repositories/transformers/collectionTransformers.ts index 43802706..a26c4718 100644 --- a/src/collections/infra/repositories/transformers/collectionTransformers.ts +++ b/src/collections/infra/repositories/transformers/collectionTransformers.ts @@ -44,6 +44,7 @@ import { PublicationStatusCount } from '../../../domain/models/MyDataCollectionItemSubset' import { PublicationStatus } from '../../../../core/domain/models/PublicationStatus' +import { CollectionLinks } from '../../../domain/models/CollectionLinks' export const transformCollectionResponseToCollection = (response: AxiosResponse): Collection => { const collectionPayload = response.data.data @@ -152,7 +153,19 @@ export const transformCollectionItemsResponseToCollectionItemSubset = ( ...(countPerObjectType && { countPerObjectType }) } } - +export const transformCollectionLinksResponseToCollectionLinks = ( + response: AxiosResponse +): CollectionLinks => { + const responseDataPayload = response.data.data + const linkedCollections = responseDataPayload.linkedDataverses + const collectionsLinkingToThis = responseDataPayload.dataversesLinkingToThis + const linkedDatasets = responseDataPayload.linkedDatasets + return { + linkedCollections, + collectionsLinkingToThis, + linkedDatasets + } +} export const transformMyDataResponseToCollectionItemSubset = ( response: AxiosResponse ): MyDataCollectionItemSubset => { diff --git a/src/datasets/domain/models/CitationFormat.ts b/src/datasets/domain/models/CitationFormat.ts new file mode 100644 index 00000000..56d923d6 --- /dev/null +++ b/src/datasets/domain/models/CitationFormat.ts @@ -0,0 +1,7 @@ +export enum CitationFormat { + Internal = 'Internal', + EndNote = 'EndNote', + RIS = 'RIS', + BibTeX = 'BibTeX', + CSLJson = 'CSL' +} diff --git a/src/datasets/domain/models/Dataset.ts 
b/src/datasets/domain/models/Dataset.ts index 51f2c433..e858de9e 100644 --- a/src/datasets/domain/models/Dataset.ts +++ b/src/datasets/domain/models/Dataset.ts @@ -1,4 +1,5 @@ import { DvObjectOwnerNode } from '../../../core/domain/models/DvObjectOwnerNode' +import { License } from '../../../licenses' export interface Dataset { id: number @@ -13,6 +14,7 @@ export interface Dataset { citationDate?: string metadataBlocks: DatasetMetadataBlocks isPartOf: DvObjectOwnerNode + datasetType?: string } export interface DatasetVersionInfo { @@ -32,11 +34,7 @@ export enum DatasetVersionState { DEACCESSIONED = 'DEACCESSIONED' } -export interface DatasetLicense { - name: string - uri: string - iconUri?: string -} +export type DatasetLicense = Pick export interface CustomTerms { termsOfUse: string diff --git a/src/datasets/domain/models/DatasetLinkedCollection.ts b/src/datasets/domain/models/DatasetLinkedCollection.ts new file mode 100644 index 00000000..a07e142a --- /dev/null +++ b/src/datasets/domain/models/DatasetLinkedCollection.ts @@ -0,0 +1,5 @@ +export interface DatasetLinkedCollection { + id: number + alias: string + displayName: string +} diff --git a/src/datasets/domain/models/DatasetSummary.ts b/src/datasets/domain/models/DatasetSummary.ts new file mode 100644 index 00000000..a868ad32 --- /dev/null +++ b/src/datasets/domain/models/DatasetSummary.ts @@ -0,0 +1,4 @@ +export interface DatasetSummary { + persistentId: string + title: string +} diff --git a/src/datasets/domain/models/DatasetTemplate.ts b/src/datasets/domain/models/DatasetTemplate.ts new file mode 100644 index 00000000..9be71f23 --- /dev/null +++ b/src/datasets/domain/models/DatasetTemplate.ts @@ -0,0 +1,23 @@ +import { DatasetMetadataBlock, TermsOfUse } from './Dataset' +import { License } from '../../../licenses/domain/models/License' + +export interface DatasetTemplate { + id: number + name: string + collectionAlias: string + isDefault: boolean + usageCount: number + createTime: string + createDate: 
string + // 👇 From Edit Template Metadata + datasetMetadataBlocks: DatasetMetadataBlock[] + instructions: DatasetTemplateInstruction[] + // 👇 From Edit Template Terms + termsOfUse: TermsOfUse + license?: License // This license property is going to be present if not custom terms are added in the UI +} + +export interface DatasetTemplateInstruction { + instructionField: string + instructionText: string +} diff --git a/src/datasets/domain/models/DatasetType.ts b/src/datasets/domain/models/DatasetType.ts new file mode 100644 index 00000000..56a5ed43 --- /dev/null +++ b/src/datasets/domain/models/DatasetType.ts @@ -0,0 +1,6 @@ +export interface DatasetType { + id?: number + name: string + linkedMetadataBlocks?: string[] + availableLicenses?: string[] +} diff --git a/src/datasets/domain/models/FormattedCitation.ts b/src/datasets/domain/models/FormattedCitation.ts new file mode 100644 index 00000000..1057db64 --- /dev/null +++ b/src/datasets/domain/models/FormattedCitation.ts @@ -0,0 +1,4 @@ +export type FormattedCitation = { + content: string + contentType: string +} diff --git a/src/datasets/domain/repositories/IDatasetsRepository.ts b/src/datasets/domain/repositories/IDatasetsRepository.ts index 66fa4587..e78816c4 100644 --- a/src/datasets/domain/repositories/IDatasetsRepository.ts +++ b/src/datasets/domain/repositories/IDatasetsRepository.ts @@ -9,6 +9,11 @@ import { MetadataBlock } from '../../../metadataBlocks' import { DatasetVersionDiff } from '../models/DatasetVersionDiff' import { DatasetDownloadCount } from '../models/DatasetDownloadCount' import { DatasetVersionSummaryInfo } from '../models/DatasetVersionSummaryInfo' +import { DatasetLinkedCollection } from '../models/DatasetLinkedCollection' +import { CitationFormat } from '../models/CitationFormat' +import { FormattedCitation } from '../models/FormattedCitation' +import { DatasetTemplate } from '../models/DatasetTemplate' +import { DatasetType } from '../models/DatasetType' export interface 
IDatasetsRepository { getDataset( @@ -41,7 +46,8 @@ export interface IDatasetsRepository { createDataset( newDataset: DatasetDTO, datasetMetadataBlocks: MetadataBlock[], - collectionId: string + collectionId: string, + datasetType?: string ): Promise publishDataset(datasetId: number | string, versionUpdateType: VersionUpdateType): Promise updateDataset( @@ -61,4 +67,27 @@ export interface IDatasetsRepository { ): Promise getDatasetVersionsSummaries(datasetId: number | string): Promise deleteDatasetDraft(datasetId: number | string): Promise + linkDataset(datasetId: number, collectionAlias: string): Promise + unlinkDataset(datasetId: number, collectionAlias: string): Promise + getDatasetLinkedCollections(datasetId: number | string): Promise + getDatasetAvailableCategories(datasetId: number | string): Promise + getDatasetCitationInOtherFormats( + datasetId: number | string, + datasetVersionId: string, + format: CitationFormat, + includeDeaccessioned?: boolean + ): Promise + getDatasetTemplates(collectionIdOrAlias: number | string): Promise + getDatasetAvailableDatasetTypes(): Promise + getDatasetAvailableDatasetType(datasetTypeId: number | string): Promise + addDatasetType(datasetType: DatasetType): Promise + linkDatasetTypeWithMetadataBlocks( + datasetTypeId: number | string, + metadataBlocks: string[] + ): Promise + setAvailableLicensesForDatasetType( + datasetTypeId: number | string, + licenses: string[] + ): Promise + deleteDatasetType(datasetTypeId: number): Promise } diff --git a/src/datasets/domain/useCases/AddDatasetType.ts b/src/datasets/domain/useCases/AddDatasetType.ts new file mode 100644 index 00000000..7e2e11c9 --- /dev/null +++ b/src/datasets/domain/useCases/AddDatasetType.ts @@ -0,0 +1,18 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase' +import { DatasetType } from '../models/DatasetType' +import { IDatasetsRepository } from '../repositories/IDatasetsRepository' + +export class AddDatasetType implements UseCase { + private 
datasetsRepository: IDatasetsRepository + + constructor(datasetsRepository: IDatasetsRepository) { + this.datasetsRepository = datasetsRepository + } + + /** + * Add a dataset type that can be selected when creating a dataset. + */ + async execute(datasetType: DatasetType): Promise { + return await this.datasetsRepository.addDatasetType(datasetType) + } +} diff --git a/src/datasets/domain/useCases/CreateDataset.ts b/src/datasets/domain/useCases/CreateDataset.ts index 65bffae4..090c0721 100644 --- a/src/datasets/domain/useCases/CreateDataset.ts +++ b/src/datasets/domain/useCases/CreateDataset.ts @@ -20,6 +20,7 @@ export class CreateDataset extends DatasetWriteUseCase} * @throws {ResourceValidationError} - If there are validation errors related to the provided information. * @throws {ReadError} - If there are errors while reading data. @@ -27,10 +28,16 @@ export class CreateDataset extends DatasetWriteUseCase { const metadataBlocks = await this.getNewDatasetMetadataBlocks(newDataset) this.getNewDatasetValidator().validate(newDataset, metadataBlocks) - return this.getDatasetsRepository().createDataset(newDataset, metadataBlocks, collectionId) + return this.getDatasetsRepository().createDataset( + newDataset, + metadataBlocks, + collectionId, + datasetType + ) } } diff --git a/src/datasets/domain/useCases/DeleteDatasetType.ts b/src/datasets/domain/useCases/DeleteDatasetType.ts new file mode 100644 index 00000000..b3c841aa --- /dev/null +++ b/src/datasets/domain/useCases/DeleteDatasetType.ts @@ -0,0 +1,17 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase' +import { IDatasetsRepository } from '../repositories/IDatasetsRepository' + +export class DeleteDatasetType implements UseCase { + private datasetsRepository: IDatasetsRepository + + constructor(datasetsRepository: IDatasetsRepository) { + this.datasetsRepository = datasetsRepository + } + + /** + * Deletes a dataset type. 
+ */ + async execute(datasetTypeId: number): Promise { + return await this.datasetsRepository.deleteDatasetType(datasetTypeId) + } +} diff --git a/src/datasets/domain/useCases/GetDatasetAvailableCategories.ts b/src/datasets/domain/useCases/GetDatasetAvailableCategories.ts new file mode 100644 index 00000000..e51544d6 --- /dev/null +++ b/src/datasets/domain/useCases/GetDatasetAvailableCategories.ts @@ -0,0 +1,20 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase' +import { IDatasetsRepository } from '../repositories/IDatasetsRepository' + +export class GetDatasetAvailableCategories implements UseCase { + private readonly datasetsRepository: IDatasetsRepository + + constructor(datasetsRepository: IDatasetsRepository) { + this.datasetsRepository = datasetsRepository + } + + /** + * Retrieves the available file categories for a dataset. + * + * @param {number | string} [datasetId] - Persistent dataset identifier + * @returns {Promise} - List of available file categories + */ + async execute(datasetId: number | string): Promise { + return this.datasetsRepository.getDatasetAvailableCategories(datasetId) + } +} diff --git a/src/datasets/domain/useCases/GetDatasetAvailableDatasetType.ts b/src/datasets/domain/useCases/GetDatasetAvailableDatasetType.ts new file mode 100644 index 00000000..b870216c --- /dev/null +++ b/src/datasets/domain/useCases/GetDatasetAvailableDatasetType.ts @@ -0,0 +1,18 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase' +import { DatasetType } from '../models/DatasetType' +import { IDatasetsRepository } from '../repositories/IDatasetsRepository' + +export class GetDatasetAvailableDatasetType implements UseCase { + private datasetsRepository: IDatasetsRepository + + constructor(datasetsRepository: IDatasetsRepository) { + this.datasetsRepository = datasetsRepository + } + + /** + * Returns a single available dataset type that can be selected when creating a dataset. 
+ */ + async execute(datasetTypeId: number | string): Promise { + return await this.datasetsRepository.getDatasetAvailableDatasetType(datasetTypeId) + } +} diff --git a/src/datasets/domain/useCases/GetDatasetAvailableDatasetTypes.ts b/src/datasets/domain/useCases/GetDatasetAvailableDatasetTypes.ts new file mode 100644 index 00000000..c7dce4a5 --- /dev/null +++ b/src/datasets/domain/useCases/GetDatasetAvailableDatasetTypes.ts @@ -0,0 +1,20 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase' +import { DatasetType } from '../models/DatasetType' +import { IDatasetsRepository } from '../repositories/IDatasetsRepository' + +export class GetDatasetAvailableDatasetTypes implements UseCase { + private datasetsRepository: IDatasetsRepository + + constructor(datasetsRepository: IDatasetsRepository) { + this.datasetsRepository = datasetsRepository + } + + /** + * Returns the list of available dataset types that can be selected when creating a dataset. + * + * @returns {Promise} + */ + async execute(): Promise { + return await this.datasetsRepository.getDatasetAvailableDatasetTypes() + } +} diff --git a/src/datasets/domain/useCases/GetDatasetCitationInOtherFormats.ts b/src/datasets/domain/useCases/GetDatasetCitationInOtherFormats.ts new file mode 100644 index 00000000..07bc4fdb --- /dev/null +++ b/src/datasets/domain/useCases/GetDatasetCitationInOtherFormats.ts @@ -0,0 +1,36 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase' +import { IDatasetsRepository } from '../repositories/IDatasetsRepository' +import { DatasetNotNumberedVersion } from '../models/DatasetNotNumberedVersion' +import { FormattedCitation } from '../models/FormattedCitation' +import { CitationFormat } from '../models/CitationFormat' + +export class GetDatasetCitationInOtherFormats implements UseCase { + private datasetsRepository: IDatasetsRepository + + constructor(datasetsRepository: IDatasetsRepository) { + this.datasetsRepository = datasetsRepository + } + + /** + * Returns 
the dataset citation in the specified format. + * + * @param {number | string} datasetId - The dataset identifier. + * @param {string | DatasetNotNumberedVersion} [datasetVersionId=DatasetNotNumberedVersion.LATEST] - The dataset version identifier, which can be a version-specific string (e.g., '1.0') or a DatasetNotNumberedVersion enum value. Defaults to LATEST. + * @param {CitationFormat} format - The citation format to return. One of: 'EndNote', 'RIS', 'BibTeX', 'CSLJson', 'Internal'. + * @param {boolean} [includeDeaccessioned=false] - Whether to include deaccessioned versions in the search. Defaults to false. + * @returns {Promise} The citation content, format, and content type. + */ + async execute( + datasetId: number | string, + datasetVersionId: string | DatasetNotNumberedVersion = DatasetNotNumberedVersion.LATEST, + format: CitationFormat, + includeDeaccessioned = false + ): Promise { + return await this.datasetsRepository.getDatasetCitationInOtherFormats( + datasetId, + datasetVersionId, + format, + includeDeaccessioned + ) + } +} diff --git a/src/datasets/domain/useCases/GetDatasetLinkedCollections.ts b/src/datasets/domain/useCases/GetDatasetLinkedCollections.ts new file mode 100644 index 00000000..ff2a448c --- /dev/null +++ b/src/datasets/domain/useCases/GetDatasetLinkedCollections.ts @@ -0,0 +1,21 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase' +import { DatasetLinkedCollection } from '../models/DatasetLinkedCollection' +import { IDatasetsRepository } from '../repositories/IDatasetsRepository' + +export class GetDatasetLinkedCollections implements UseCase { + private datasetsRepository: IDatasetsRepository + + constructor(datasetsRepository: IDatasetsRepository) { + this.datasetsRepository = datasetsRepository + } + + /** + * Returns a list of collections linked to a dataset. 
+ * + * @param {number | string} [datasetId] - The dataset identifier, which can be a string (for persistent identifiers), or a number (for numeric identifiers). + * @returns {Promise} + */ + async execute(datasetId: number | string): Promise { + return await this.datasetsRepository.getDatasetLinkedCollections(datasetId) + } +} diff --git a/src/datasets/domain/useCases/GetDatasetTemplates.ts b/src/datasets/domain/useCases/GetDatasetTemplates.ts new file mode 100644 index 00000000..6878e625 --- /dev/null +++ b/src/datasets/domain/useCases/GetDatasetTemplates.ts @@ -0,0 +1,25 @@ +import { ROOT_COLLECTION_ID } from '../../../collections/domain/models/Collection' +import { UseCase } from '../../../core/domain/useCases/UseCase' +import { DatasetTemplate } from '../models/DatasetTemplate' +import { IDatasetsRepository } from '../repositories/IDatasetsRepository' + +export class GetDatasetTemplates implements UseCase { + private datasetsRepository: IDatasetsRepository + + constructor(datasetsRepository: IDatasetsRepository) { + this.datasetsRepository = datasetsRepository + } + + /** + * Returns a DatasetTemplate array containing the dataset templates of the requested collection, given the collection identifier or alias. 
+ * + * @param {number | string} [collectionIdOrAlias = ':root'] - A generic collection identifier, which can be either a string (for queries by CollectionAlias), or a number (for queries by CollectionId) + * If this parameter is not set, the default value is: ':root' + * @returns {Promise} + */ + async execute( + collectionIdOrAlias: number | string = ROOT_COLLECTION_ID + ): Promise { + return await this.datasetsRepository.getDatasetTemplates(collectionIdOrAlias) + } +} diff --git a/src/datasets/domain/useCases/LinkDataset.ts b/src/datasets/domain/useCases/LinkDataset.ts new file mode 100644 index 00000000..be7f732f --- /dev/null +++ b/src/datasets/domain/useCases/LinkDataset.ts @@ -0,0 +1,21 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase' +import { IDatasetsRepository } from '../repositories/IDatasetsRepository' + +export class LinkDataset implements UseCase { + private datasetsRepository: IDatasetsRepository + + constructor(datasetsRepository: IDatasetsRepository) { + this.datasetsRepository = datasetsRepository + } + + /** + * Creates a link between a Dataset and a Collection. + * + * @param {number} [datasetId] - The dataset id. + * @param {string} [collectionAlias] - The collection alias. + * @returns {Promise} - This method does not return anything upon successful completion. 
+ */ + async execute(datasetId: number, collectionAlias: string): Promise { + return await this.datasetsRepository.linkDataset(datasetId, collectionAlias) + } +} diff --git a/src/datasets/domain/useCases/LinkDatasetTypeWithMetadataBlocks.ts b/src/datasets/domain/useCases/LinkDatasetTypeWithMetadataBlocks.ts new file mode 100644 index 00000000..f9a6b447 --- /dev/null +++ b/src/datasets/domain/useCases/LinkDatasetTypeWithMetadataBlocks.ts @@ -0,0 +1,20 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase' +import { IDatasetsRepository } from '../repositories/IDatasetsRepository' + +export class LinkDatasetTypeWithMetadataBlocks implements UseCase { + private datasetsRepository: IDatasetsRepository + + constructor(datasetsRepository: IDatasetsRepository) { + this.datasetsRepository = datasetsRepository + } + + /** + * Links a dataset type with one or more metadata blocks. These metadata blocks will be shown when creating a dataset of this type. + */ + async execute(datasetTypeId: number | string, metadataBlocks: string[]): Promise { + return await this.datasetsRepository.linkDatasetTypeWithMetadataBlocks( + datasetTypeId, + metadataBlocks + ) + } +} diff --git a/src/datasets/domain/useCases/SetAvailableLicensesForDatasetType.ts b/src/datasets/domain/useCases/SetAvailableLicensesForDatasetType.ts new file mode 100644 index 00000000..e4df9ab2 --- /dev/null +++ b/src/datasets/domain/useCases/SetAvailableLicensesForDatasetType.ts @@ -0,0 +1,17 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase' +import { IDatasetsRepository } from '../repositories/IDatasetsRepository' + +export class SetAvailableLicensesForDatasetType implements UseCase { + private datasetsRepository: IDatasetsRepository + + constructor(datasetsRepository: IDatasetsRepository) { + this.datasetsRepository = datasetsRepository + } + + /** + * Sets the available licenses for a given dataset type. This limits the license options when creating a dataset of this type. 
+ */ + async execute(datasetTypeId: number | string, licenses: string[]): Promise { + return await this.datasetsRepository.setAvailableLicensesForDatasetType(datasetTypeId, licenses) + } +} diff --git a/src/datasets/domain/useCases/UnlinkDataset.ts b/src/datasets/domain/useCases/UnlinkDataset.ts new file mode 100644 index 00000000..d2d8eff5 --- /dev/null +++ b/src/datasets/domain/useCases/UnlinkDataset.ts @@ -0,0 +1,21 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase' +import { IDatasetsRepository } from '../repositories/IDatasetsRepository' + +export class UnlinkDataset implements UseCase { + private datasetsRepository: IDatasetsRepository + + constructor(datasetsRepository: IDatasetsRepository) { + this.datasetsRepository = datasetsRepository + } + + /** + * Removes a link between a Dataset and a Collection. + * + * @param {number} [datasetId] - The dataset id. + * @param {string} [collectionAlias] - The collection alias. + * @returns {Promise} - This method does not return anything upon successful completion. 
+ */ + async execute(datasetId: number, collectionAlias: string): Promise { + return await this.datasetsRepository.unlinkDataset(datasetId, collectionAlias) + } +} diff --git a/src/datasets/index.ts b/src/datasets/index.ts index ba1fe5d5..6b93a7cd 100644 --- a/src/datasets/index.ts +++ b/src/datasets/index.ts @@ -20,6 +20,18 @@ import { DeaccessionDataset } from './domain/useCases/DeaccessionDataset' import { GetDatasetDownloadCount } from './domain/useCases/GetDatasetDownloadCount' import { GetDatasetVersionsSummaries } from './domain/useCases/GetDatasetVersionsSummaries' import { DeleteDatasetDraft } from './domain/useCases/DeleteDatasetDraft' +import { LinkDataset } from './domain/useCases/LinkDataset' +import { UnlinkDataset } from './domain/useCases/UnlinkDataset' +import { GetDatasetLinkedCollections } from './domain/useCases/GetDatasetLinkedCollections' +import { GetDatasetAvailableCategories } from './domain/useCases/GetDatasetAvailableCategories' +import { GetDatasetAvailableDatasetTypes } from './domain/useCases/GetDatasetAvailableDatasetTypes' +import { GetDatasetAvailableDatasetType } from './domain/useCases/GetDatasetAvailableDatasetType' +import { AddDatasetType } from './domain/useCases/AddDatasetType' +import { LinkDatasetTypeWithMetadataBlocks } from './domain/useCases/LinkDatasetTypeWithMetadataBlocks' +import { SetAvailableLicensesForDatasetType } from './domain/useCases/SetAvailableLicensesForDatasetType' +import { DeleteDatasetType } from './domain/useCases/DeleteDatasetType' +import { GetDatasetCitationInOtherFormats } from './domain/useCases/GetDatasetCitationInOtherFormats' +import { GetDatasetTemplates } from './domain/useCases/GetDatasetTemplates' const datasetsRepository = new DatasetsRepository() @@ -54,6 +66,20 @@ const deaccessionDataset = new DeaccessionDataset(datasetsRepository) const getDatasetDownloadCount = new GetDatasetDownloadCount(datasetsRepository) const getDatasetVersionsSummaries = new 
GetDatasetVersionsSummaries(datasetsRepository) const deleteDatasetDraft = new DeleteDatasetDraft(datasetsRepository) +const linkDataset = new LinkDataset(datasetsRepository) +const unlinkDataset = new UnlinkDataset(datasetsRepository) +const getDatasetLinkedCollections = new GetDatasetLinkedCollections(datasetsRepository) +const getDatasetAvailableCategories = new GetDatasetAvailableCategories(datasetsRepository) +const getDatasetAvailableDatasetTypes = new GetDatasetAvailableDatasetTypes(datasetsRepository) +const getDatasetAvailableDatasetType = new GetDatasetAvailableDatasetType(datasetsRepository) +const addDatasetType = new AddDatasetType(datasetsRepository) +const linkDatasetTypeWithMetadataBlocks = new LinkDatasetTypeWithMetadataBlocks(datasetsRepository) +const setAvailableLicensesForDatasetType = new SetAvailableLicensesForDatasetType( + datasetsRepository +) +const deleteDatasetType = new DeleteDatasetType(datasetsRepository) +const getDatasetCitationInOtherFormats = new GetDatasetCitationInOtherFormats(datasetsRepository) +const getDatasetTemplates = new GetDatasetTemplates(datasetsRepository) export { getDataset, @@ -71,7 +97,19 @@ export { deaccessionDataset, getDatasetDownloadCount, getDatasetVersionsSummaries, - deleteDatasetDraft + deleteDatasetDraft, + linkDataset, + unlinkDataset, + getDatasetLinkedCollections, + getDatasetAvailableCategories, + getDatasetCitationInOtherFormats, + getDatasetTemplates, + getDatasetAvailableDatasetTypes, + getDatasetAvailableDatasetType, + addDatasetType, + linkDatasetTypeWithMetadataBlocks, + setAvailableLicensesForDatasetType, + deleteDatasetType } export { DatasetNotNumberedVersion } from './domain/models/DatasetNotNumberedVersion' export { DatasetUserPermissions } from './domain/models/DatasetUserPermissions' @@ -105,3 +143,5 @@ export { DatasetVersionSummaryInfo, DatasetVersionSummaryStringValues } from './domain/models/DatasetVersionSummaryInfo' +export { DatasetLinkedCollection } from 
'./domain/models/DatasetLinkedCollection' +export { DatasetType } from './domain/models/DatasetType' diff --git a/src/datasets/infra/repositories/DatasetsRepository.ts b/src/datasets/infra/repositories/DatasetsRepository.ts index 036872d6..1545a43d 100644 --- a/src/datasets/infra/repositories/DatasetsRepository.ts +++ b/src/datasets/infra/repositories/DatasetsRepository.ts @@ -1,3 +1,4 @@ +import { AxiosResponse } from 'axios' import { ApiRepository } from '../../../core/infra/repositories/ApiRepository' import { IDatasetsRepository } from '../../domain/repositories/IDatasetsRepository' import { Dataset, VersionUpdateType } from '../../domain/models/Dataset' @@ -20,6 +21,14 @@ import { DatasetVersionDiff } from '../../domain/models/DatasetVersionDiff' import { transformDatasetVersionDiffResponseToDatasetVersionDiff } from './transformers/datasetVersionDiffTransformers' import { DatasetDownloadCount } from '../../domain/models/DatasetDownloadCount' import { DatasetVersionSummaryInfo } from '../../domain/models/DatasetVersionSummaryInfo' +import { DatasetLinkedCollection } from '../../domain/models/DatasetLinkedCollection' +import { CitationFormat } from '../../domain/models/CitationFormat' +import { transformDatasetLinkedCollectionsResponseToDatasetLinkedCollection } from './transformers/datasetLinkedCollectionsTransformers' +import { FormattedCitation } from '../../domain/models/FormattedCitation' +import { DatasetTemplate } from '../../domain/models/DatasetTemplate' +import { DatasetTemplatePayload } from './transformers/DatasetTemplatePayload' +import { transformDatasetTemplatePayloadToDatasetTemplate } from './transformers/datasetTemplateTransformers' +import { DatasetType } from '../../domain/models/DatasetType' export interface GetAllDatasetPreviewsQueryParams { per_page?: number @@ -74,7 +83,7 @@ export class DatasetsRepository extends ApiRepository implements IDatasetsReposi } public async getDatasetCitation( - datasetId: number, + datasetId: number | 
string, datasetVersionId: string, includeDeaccessioned: boolean ): Promise { @@ -93,6 +102,33 @@ export class DatasetsRepository extends ApiRepository implements IDatasetsReposi }) } + public async getDatasetCitationInOtherFormats( + datasetId: number | string, + datasetVersionId: string | 'LATEST' = 'LATEST', + format: CitationFormat, + includeDeaccessioned = false + ): Promise { + const endpoint = this.buildApiEndpoint( + this.datasetsResourceName, + `versions/${datasetVersionId}/citation/${format}`, + datasetId + ) + const response = await this.doGet(endpoint, true, { includeDeaccessioned }) + + const contentType = response.headers['content-type'] + let content: string + if (contentType && contentType.includes('application/json')) { + content = JSON.stringify(response.data) + } else { + content = response.data + } + + return { + content, + contentType + } + } + public async getPrivateUrlDatasetCitation(token: string): Promise { return this.doGet( this.buildApiEndpoint(this.datasetsResourceName, `privateUrlDatasetVersion/${token}/citation`) @@ -172,11 +208,16 @@ export class DatasetsRepository extends ApiRepository implements IDatasetsReposi public async createDataset( newDataset: DatasetDTO, datasetMetadataBlocks: MetadataBlock[], - collectionId: string + collectionId: string, + datasetType?: string ): Promise { return this.doPost( `/dataverses/${collectionId}/datasets`, - transformDatasetModelToNewDatasetRequestPayload(newDataset, datasetMetadataBlocks) + transformDatasetModelToNewDatasetRequestPayload( + newDataset, + datasetMetadataBlocks, + datasetType + ) ) .then((response) => { const responseData = response.data.data @@ -287,4 +328,129 @@ export class DatasetsRepository extends ApiRepository implements IDatasetsReposi throw error }) } + + public async linkDataset(datasetId: number, collectionAlias: string): Promise { + return this.doPut(`/${this.datasetsResourceName}/${datasetId}/link/${collectionAlias}`, {}) + .then(() => undefined) + .catch((error) => { 
+ throw error + }) + } + + public async unlinkDataset(datasetId: number, collectionAlias: string): Promise { + return this.doDelete(`/${this.datasetsResourceName}/${datasetId}/deleteLink/${collectionAlias}`) + .then(() => undefined) + .catch((error) => { + throw error + }) + } + + public async getDatasetLinkedCollections( + datasetId: number | string + ): Promise { + return this.doGet(this.buildApiEndpoint(this.datasetsResourceName, 'links', datasetId), true) + .then((response) => + transformDatasetLinkedCollectionsResponseToDatasetLinkedCollection(response.data.data) + ) + .catch((error) => { + throw error + }) + } + + public async getDatasetAvailableCategories(datasetId: number | string): Promise { + return this.doGet( + this.buildApiEndpoint(this.datasetsResourceName, 'availableFileCategories', datasetId), + true + ) + .then((response) => response.data.data as string[]) + .catch((error) => { + throw error + }) + } + + public async getDatasetTemplates( + collectionIdOrAlias: number | string + ): Promise { + return this.doGet(`/dataverses/${collectionIdOrAlias}/templates`, true) + .then((response: AxiosResponse<{ data: DatasetTemplatePayload[] }>) => + transformDatasetTemplatePayloadToDatasetTemplate(response.data.data) + ) + .catch((error) => { + throw error + }) + } + + public async getDatasetAvailableDatasetTypes(): Promise { + return this.doGet(this.buildApiEndpoint(this.datasetsResourceName, 'datasetTypes')) + .then((response) => response.data.data) + .catch((error) => { + throw error + }) + } + + public async getDatasetAvailableDatasetType( + datasetTypeId: number | string + ): Promise { + const endpoint = this.buildApiEndpoint( + this.datasetsResourceName, + 'datasetTypes/' + datasetTypeId + ) + return this.doGet(endpoint) + .then((response) => response.data.data) + .catch((error) => { + throw error + }) + } + + public async addDatasetType(datasetType: DatasetType): Promise { + return this.doPost( + this.buildApiEndpoint(this.datasetsResourceName, 
'datasetTypes'), + datasetType + ) + .then((response) => response.data.data) + .catch((error) => { + throw error + }) + } + + public async linkDatasetTypeWithMetadataBlocks( + datasetTypeId: number | string, + metadataBlocks: string[] + ): Promise { + return this.doPut( + this.buildApiEndpoint(this.datasetsResourceName, 'datasetTypes/' + datasetTypeId), + metadataBlocks + ) + .then((response) => response.data.data) + .catch((error) => { + throw error + }) + } + + public async setAvailableLicensesForDatasetType( + datasetTypeId: number | string, + licenses: string[] + ): Promise { + return this.doPut( + this.buildApiEndpoint( + this.datasetsResourceName, + 'datasetTypes/' + datasetTypeId + '/licenses' + ), + licenses + ) + .then((response) => response.data.data) + .catch((error) => { + throw error + }) + } + + public async deleteDatasetType(datasetTypeId: number): Promise { + return this.doDelete( + this.buildApiEndpoint(this.datasetsResourceName, 'datasetTypes/' + datasetTypeId) + ) + .then((response) => response.data.data) + .catch((error) => { + throw error + }) + } } diff --git a/src/datasets/infra/repositories/transformers/DatasetLinkedCollectionsPayload.ts b/src/datasets/infra/repositories/transformers/DatasetLinkedCollectionsPayload.ts new file mode 100644 index 00000000..addff397 --- /dev/null +++ b/src/datasets/infra/repositories/transformers/DatasetLinkedCollectionsPayload.ts @@ -0,0 +1,9 @@ +export interface DatasetLinkedCollectionsPayload { + id: number + identifier: string + 'linked-dataverses': { + id: number + alias: string + displayName: string + }[] +} diff --git a/src/datasets/infra/repositories/transformers/DatasetPayload.ts b/src/datasets/infra/repositories/transformers/DatasetPayload.ts index 6378ac45..b0535677 100644 --- a/src/datasets/infra/repositories/transformers/DatasetPayload.ts +++ b/src/datasets/infra/repositories/transformers/DatasetPayload.ts @@ -13,7 +13,7 @@ export interface DatasetPayload { lastUpdateTime: string releaseTime: 
string metadataBlocks: MetadataBlocksPayload - license?: LicensePayload + license?: DatasetLicensePayload alternativePersistentId?: string publicationDate?: string citationDate?: string @@ -36,9 +36,10 @@ export interface DatasetPayload { files: FilePayload[] isPartOf: OwnerNodePayload deaccessionNote?: string + datasetType?: string } -export interface LicensePayload { +export interface DatasetLicensePayload { name: string uri: string iconUri?: string diff --git a/src/datasets/infra/repositories/transformers/DatasetTemplatePayload.ts b/src/datasets/infra/repositories/transformers/DatasetTemplatePayload.ts new file mode 100644 index 00000000..e43e96eb --- /dev/null +++ b/src/datasets/infra/repositories/transformers/DatasetTemplatePayload.ts @@ -0,0 +1,52 @@ +import { LicensePayload } from '../../../../licenses/domain/repositories/transformers/LicensePayload' +import { MetadataFieldPayload } from './DatasetPayload' + +export interface DatasetTemplatePayload { + id: number + name: string + dataverseAlias: string + isDefault: boolean + usageCount: number + createTime: string + createDate: string + // 👇 From Edit Template Metadata + datasetFields: DatasetFieldsPayload + instructions: Instruction[] + // 👇 From Edit Template Terms + termsOfUseAndAccess: { + id: number + fileAccessRequest: boolean + // This license property is going to be present if no custom terms are added in the UI + license?: LicensePayload + // Below fields are going to be present if they are added in "Restricted Files + Terms of Access" + termsOfAccess?: string // This is terms of access for restricted files in the JSF UI + dataAccessPlace?: string + originalArchive?: string + availabilityStatus?: string + sizeOfCollection?: string + studyCompletion?: string + contactForAccess?: string + // Below fields are going to be present if custom terms are added in the UI, they will be mapped and grouped under customTerms + termsOfUse?: string + confidentialityDeclaration?: string + specialPermissions?: string + 
restrictions?: string + citationRequirements?: string + depositorRequirements?: string + conditions?: string + disclaimer?: string + } +} + +type DatasetFieldsPayload = Record + +interface DatasetFieldInfoPayload { + displayName: string + name: string + fields: MetadataFieldPayload[] +} + +interface Instruction { + instructionField: string + instructionText: string +} diff --git a/src/datasets/infra/repositories/transformers/datasetLinkedCollectionsTransformers.ts b/src/datasets/infra/repositories/transformers/datasetLinkedCollectionsTransformers.ts new file mode 100644 index 00000000..66ae1ee5 --- /dev/null +++ b/src/datasets/infra/repositories/transformers/datasetLinkedCollectionsTransformers.ts @@ -0,0 +1,12 @@ +import { DatasetLinkedCollection } from '../../../domain/models/DatasetLinkedCollection' +import { DatasetLinkedCollectionsPayload } from './DatasetLinkedCollectionsPayload' + +export const transformDatasetLinkedCollectionsResponseToDatasetLinkedCollection = ( + payload: DatasetLinkedCollectionsPayload +): DatasetLinkedCollection[] => { + return payload['linked-dataverses'].map((linkedDataverse) => ({ + id: linkedDataverse.id, + alias: linkedDataverse.alias, + displayName: linkedDataverse.displayName + })) +} diff --git a/src/datasets/infra/repositories/transformers/datasetTemplateTransformers.ts b/src/datasets/infra/repositories/transformers/datasetTemplateTransformers.ts new file mode 100644 index 00000000..32486199 --- /dev/null +++ b/src/datasets/infra/repositories/transformers/datasetTemplateTransformers.ts @@ -0,0 +1,57 @@ +import { transformPayloadLicenseToLicense } from '../../../../licenses/domain/repositories/transformers/licenseTransformers' +import { DatasetTemplate } from '../../../domain/models/DatasetTemplate' +import { DatasetTemplatePayload } from './DatasetTemplatePayload' +import { transformPayloadToDatasetMetadataBlocks } from './datasetTransformers' + +export const transformDatasetTemplatePayloadToDatasetTemplate = ( + 
collectionDatasetTemplatePayload: DatasetTemplatePayload[] +): DatasetTemplate[] => { + return collectionDatasetTemplatePayload.map((payload) => { + const datasetTemplate: DatasetTemplate = { + id: payload.id, + name: payload.name, + collectionAlias: payload.dataverseAlias, + isDefault: payload.isDefault, + usageCount: payload.usageCount, + createTime: payload.createTime, + createDate: payload.createDate, + datasetMetadataBlocks: transformPayloadToDatasetMetadataBlocks(payload.datasetFields, false), + instructions: payload.instructions.map((instruction) => ({ + instructionField: instruction.instructionField, + instructionText: instruction.instructionText + })), + termsOfUse: { + termsOfAccess: { + fileAccessRequest: payload.termsOfUseAndAccess.fileAccessRequest, + termsOfAccessForRestrictedFiles: payload.termsOfUseAndAccess.termsOfAccess, + dataAccessPlace: payload.termsOfUseAndAccess.dataAccessPlace, + originalArchive: payload.termsOfUseAndAccess.originalArchive, + availabilityStatus: payload.termsOfUseAndAccess.availabilityStatus, + contactForAccess: payload.termsOfUseAndAccess.contactForAccess, + sizeOfCollection: payload.termsOfUseAndAccess.sizeOfCollection, + studyCompletion: payload.termsOfUseAndAccess.studyCompletion + } + } + } + + if (payload.termsOfUseAndAccess.license) { + datasetTemplate.license = transformPayloadLicenseToLicense( + payload.termsOfUseAndAccess.license + ) + } else { + datasetTemplate.termsOfUse.customTerms = { + termsOfUse: payload.termsOfUseAndAccess.termsOfUse as string, + confidentialityDeclaration: payload.termsOfUseAndAccess + .confidentialityDeclaration as string, + specialPermissions: payload.termsOfUseAndAccess.specialPermissions as string, + restrictions: payload.termsOfUseAndAccess.restrictions as string, + citationRequirements: payload.termsOfUseAndAccess.citationRequirements as string, + depositorRequirements: payload.termsOfUseAndAccess.depositorRequirements as string, + conditions: payload.termsOfUseAndAccess.conditions as 
string, + disclaimer: payload.termsOfUseAndAccess.disclaimer as string + } + } + + return datasetTemplate + }) +} diff --git a/src/datasets/infra/repositories/transformers/datasetTransformers.ts b/src/datasets/infra/repositories/transformers/datasetTransformers.ts index e5e88ebd..bbb4c9fc 100644 --- a/src/datasets/infra/repositories/transformers/datasetTransformers.ts +++ b/src/datasets/infra/repositories/transformers/datasetTransformers.ts @@ -11,7 +11,7 @@ import { import { AxiosResponse } from 'axios' import { DatasetPayload, - LicensePayload, + DatasetLicensePayload, MetadataFieldPayload, MetadataBlocksPayload, MetadataFieldValuePayload, @@ -31,6 +31,7 @@ import { MetadataBlock, MetadataFieldInfo } from '../../../../metadataBlocks' const turndownService = new TurndownService() export interface NewDatasetRequestPayload { + datasetType?: string datasetVersion: { license?: DatasetLicense metadataBlocks: Record @@ -96,9 +97,11 @@ export const transformDatasetModelToUpdateDatasetRequestPayload = ( export const transformDatasetModelToNewDatasetRequestPayload = ( dataset: DatasetDTO, - metadataBlocks: MetadataBlock[] + metadataBlocks: MetadataBlock[], + datasetType?: string ): NewDatasetRequestPayload => { return { + datasetType, datasetVersion: { ...(dataset.license && { license: dataset.license }), metadataBlocks: transformMetadataBlockModelsToRequestPayload( @@ -261,7 +264,7 @@ export const transformVersionPayloadToDataset = ( } if ('license' in versionPayload) { datasetModel.license = transformPayloadToDatasetLicense( - versionPayload.license as LicensePayload + versionPayload.license as DatasetLicensePayload ) } else { datasetModel.termsOfUse.customTerms = { @@ -293,11 +296,14 @@ export const transformVersionPayloadToDataset = ( if ('citationDate' in versionPayload) { datasetModel.citationDate = versionPayload.citationDate } + if ('datasetType' in versionPayload) { + datasetModel.datasetType = versionPayload.datasetType + } return datasetModel } const 
transformPayloadToDatasetLicense = ( - licensePayload: LicensePayload + licensePayload: DatasetLicensePayload ): DatasetLicense | undefined => { if (!licensePayload) { return undefined @@ -325,7 +331,7 @@ const transformPayloadText = ( return keepRawFields ? text : transformHtmlToMarkdown(text) } -const transformPayloadToDatasetMetadataBlocks = ( +export const transformPayloadToDatasetMetadataBlocks = ( metadataBlocksPayload: MetadataBlocksPayload, keepRawFields: boolean ): DatasetMetadataBlocks => { diff --git a/src/externalTools/domain/dtos/GetExternalToolDTO.ts b/src/externalTools/domain/dtos/GetExternalToolDTO.ts new file mode 100644 index 00000000..7e415d25 --- /dev/null +++ b/src/externalTools/domain/dtos/GetExternalToolDTO.ts @@ -0,0 +1,9 @@ +/** + * @property {boolean} preview - boolean flag to indicate if the request is for previewing the tool or not. + * @property {string} locale - string specifying the locale for internationalization + */ + +export interface GetExternalToolDTO { + preview: boolean + locale: string +} diff --git a/src/externalTools/domain/models/ExternalTool.ts b/src/externalTools/domain/models/ExternalTool.ts new file mode 100644 index 00000000..0e3acdeb --- /dev/null +++ b/src/externalTools/domain/models/ExternalTool.ts @@ -0,0 +1,37 @@ +export interface ExternalTool { + id: number + displayName: string + description: string + types: ToolType[] + scope: ToolScope + contentType?: string // Only present when scope is 'file' + toolParameters?: { queryParameters?: Record[] } + allowedApiCalls?: { name: string; httpMethod: string; urlTemplate: string; timeOut: number }[] + requirements?: { auxFilesExist: { formatTag: string; formatVersion: string }[] } +} + +export enum ToolType { + Explore = 'explore', + Configure = 'configure', + Preview = 'preview', + Query = 'query' +} + +export enum ToolScope { + Dataset = 'dataset', + File = 'file' +} + +export interface DatasetExternalToolResolved { + toolUrlResolved: string + displayName: string + 
datasetId: number + preview: boolean +} + +export interface FileExternalToolResolved { + toolUrlResolved: string + displayName: string + fileId: number + preview: boolean +} diff --git a/src/externalTools/domain/repositories/IExternalToolsRepository.ts b/src/externalTools/domain/repositories/IExternalToolsRepository.ts new file mode 100644 index 00000000..36db3870 --- /dev/null +++ b/src/externalTools/domain/repositories/IExternalToolsRepository.ts @@ -0,0 +1,20 @@ +import { GetExternalToolDTO } from '../dtos/GetExternalToolDTO' +import { + DatasetExternalToolResolved, + ExternalTool, + FileExternalToolResolved +} from '../models/ExternalTool' + +export interface IExternalToolsRepository { + getExternalTools(): Promise + getDatasetExternalToolResolved( + datasetId: number | string, + toolId: number, + getExternalToolDTO: GetExternalToolDTO + ): Promise + getFileExternalToolResolved( + fileId: number | string, + toolId: number, + getExternalToolDTO: GetExternalToolDTO + ): Promise +} diff --git a/src/externalTools/domain/useCases/GetDatasetExternalToolResolved.ts b/src/externalTools/domain/useCases/GetDatasetExternalToolResolved.ts new file mode 100644 index 00000000..c1668529 --- /dev/null +++ b/src/externalTools/domain/useCases/GetDatasetExternalToolResolved.ts @@ -0,0 +1,34 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase' +import { GetExternalToolDTO } from '../dtos/GetExternalToolDTO' +import { DatasetExternalToolResolved } from '../models/ExternalTool' +import { IExternalToolsRepository } from '../repositories/IExternalToolsRepository' + +export class GetDatasetExternalToolResolved implements UseCase { + private externalToolsRepository: IExternalToolsRepository + + constructor(externalToolsRepository: IExternalToolsRepository) { + this.externalToolsRepository = externalToolsRepository + } + + /** + * Returns a DatasetExternalToolResolved object containing the resolved URL for accessing an external tool that operates at the dataset level. 
+ * The URL includes necessary authentication tokens and parameters based on the user's permissions and the tool's configuration. + * Authentication is required for draft or deaccessioned datasets and the user must have ViewUnpublishedDataset permission. + * + * @param {number | string} [datasetId] - The dataset identifier, which can be a string (for persistent identifiers), or a number (for numeric identifiers). + * @param {number} toolId - The identifier of the external tool. + * @param {GetExternalToolDTO} getExternalToolDTO - The GetExternalToolDTO object containing additional parameters for the request. + * @returns {Promise} + */ + async execute( + datasetId: number | string, + toolId: number, + getExternalToolDTO: GetExternalToolDTO + ): Promise { + return await this.externalToolsRepository.getDatasetExternalToolResolved( + datasetId, + toolId, + getExternalToolDTO + ) + } +} diff --git a/src/externalTools/domain/useCases/GetExternalTools.ts b/src/externalTools/domain/useCases/GetExternalTools.ts new file mode 100644 index 00000000..09dd83f2 --- /dev/null +++ b/src/externalTools/domain/useCases/GetExternalTools.ts @@ -0,0 +1,20 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase' +import { ExternalTool } from '../models/ExternalTool' +import { IExternalToolsRepository } from '../repositories/IExternalToolsRepository' + +export class GetExternalTools implements UseCase { + private externalToolsRepository: IExternalToolsRepository + + constructor(externalToolsRepository: IExternalToolsRepository) { + this.externalToolsRepository = externalToolsRepository + } + + /** + * Returns a list containing all the external tools available in the installation. 
+ * + * @returns {Promise} + */ + async execute(): Promise { + return await this.externalToolsRepository.getExternalTools() + } +} diff --git a/src/externalTools/domain/useCases/GetFileExternalToolResolved.ts b/src/externalTools/domain/useCases/GetFileExternalToolResolved.ts new file mode 100644 index 00000000..47f6b090 --- /dev/null +++ b/src/externalTools/domain/useCases/GetFileExternalToolResolved.ts @@ -0,0 +1,34 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase' +import { GetExternalToolDTO } from '../dtos/GetExternalToolDTO' +import { FileExternalToolResolved } from '../models/ExternalTool' +import { IExternalToolsRepository } from '../repositories/IExternalToolsRepository' + +export class GetFileExternalToolResolved implements UseCase { + private externalToolsRepository: IExternalToolsRepository + + constructor(externalToolsRepository: IExternalToolsRepository) { + this.externalToolsRepository = externalToolsRepository + } + + /** + * Returns a FileExternalToolResolved object containing the resolved URL for accessing an external tool that operates at the file level. + * The URL includes necessary authentication tokens and parameters based on the user's permissions and the tool's configuration. + * Authentication is required for draft, restricted, embargoed, or expired (retention period) files, and the user must have appropriate permissions. + * + * @param {number | string} [fileId] - The File identifier, which can be a string (for persistent identifiers), or a number (for numeric identifiers). + * @param {number} toolId - The identifier of the external tool. + * @param {GetExternalToolDTO} getExternalToolDTO - The GetExternalToolDTO object containing additional parameters for the request. 
+ * @returns {Promise} + */ + async execute( + fileId: number | string, + toolId: number, + getExternalToolDTO: GetExternalToolDTO + ): Promise { + return await this.externalToolsRepository.getFileExternalToolResolved( + fileId, + toolId, + getExternalToolDTO + ) + } +} diff --git a/src/externalTools/index.ts b/src/externalTools/index.ts new file mode 100644 index 00000000..96b2581f --- /dev/null +++ b/src/externalTools/index.ts @@ -0,0 +1,25 @@ +import { GetDatasetExternalToolResolved } from './domain/useCases/GetDatasetExternalToolResolved' +import { GetExternalTools } from './domain/useCases/GetExternalTools' +import { GetFileExternalToolResolved } from './domain/useCases/GetFileExternalToolResolved' +import { ExternalToolsRepository } from './infra/ExternalToolsRepository' + +const externalToolsRepository = new ExternalToolsRepository() + +const getExternalTools = new GetExternalTools(externalToolsRepository) +const getDatasetExternalToolResolved = new GetDatasetExternalToolResolved(externalToolsRepository) +const getFileExternalToolResolved = new GetFileExternalToolResolved(externalToolsRepository) + +export { + getExternalTools, + getDatasetExternalToolResolved, + getFileExternalToolResolved, + externalToolsRepository +} + +export { + ExternalTool, + ToolScope, + ToolType, + DatasetExternalToolResolved, + FileExternalToolResolved +} from './domain/models/ExternalTool' diff --git a/src/externalTools/infra/ExternalToolsRepository.ts b/src/externalTools/infra/ExternalToolsRepository.ts new file mode 100644 index 00000000..5e104593 --- /dev/null +++ b/src/externalTools/infra/ExternalToolsRepository.ts @@ -0,0 +1,53 @@ +import { IExternalToolsRepository } from '../domain/repositories/IExternalToolsRepository' +import { ApiRepository } from '../../core/infra/repositories/ApiRepository' +import { + DatasetExternalToolResolved, + ExternalTool, + FileExternalToolResolved +} from '../domain/models/ExternalTool' +import { GetExternalToolDTO } from 
'../domain/dtos/GetExternalToolDTO' +import { datasetExternalToolTransformer } from './transformers/datasetExternalToolTransformer' +import { fileExternalToolTransformer } from './transformers/fileExternalToolTransformer' +import { externalToolsTransformer } from './transformers/externalToolsTransformer' + +export class ExternalToolsRepository extends ApiRepository implements IExternalToolsRepository { + private readonly externalToolsResourceName: string = 'externalTools' + + public async getExternalTools(): Promise { + return this.doGet(this.buildApiEndpoint(this.externalToolsResourceName)) + .then((response) => externalToolsTransformer(response)) + .catch((error) => { + throw error + }) + } + + public async getDatasetExternalToolResolved( + datasetId: number | string, + toolId: number, + getExternalToolDTO: GetExternalToolDTO + ): Promise { + return this.doPost( + this.buildApiEndpoint('datasets', `externalTool/${toolId}/toolUrl`, datasetId), + getExternalToolDTO + ) + .then((response) => datasetExternalToolTransformer(response)) + .catch((error) => { + throw error + }) + } + + public async getFileExternalToolResolved( + fileId: number | string, + toolId: number, + getExternalToolDTO: GetExternalToolDTO + ): Promise { + return this.doPost( + this.buildApiEndpoint('files', `externalTool/${toolId}/toolUrl`, fileId), + getExternalToolDTO + ) + .then((response) => fileExternalToolTransformer(response)) + .catch((error) => { + throw error + }) + } +} diff --git a/src/externalTools/infra/transformers/ExternalToolPayload.ts b/src/externalTools/infra/transformers/ExternalToolPayload.ts new file mode 100644 index 00000000..a6f97067 --- /dev/null +++ b/src/externalTools/infra/transformers/ExternalToolPayload.ts @@ -0,0 +1,13 @@ +import { ToolScope, ToolType } from '../../domain/models/ExternalTool' + +export interface ExternalToolPayload { + id: number + displayName: string + description: string + types: ToolType[] + scope: ToolScope + contentType?: string // Only present 
when scope is 'file' + toolParameters?: { queryParameters?: Record[] } + allowedApiCalls?: { name: string; httpMethod: string; urlTemplate: string; timeOut: number }[] + requirements?: { auxFilesExist: { formatTag: string; formatVersion: string }[] } +} diff --git a/src/externalTools/infra/transformers/datasetExternalToolTransformer.ts b/src/externalTools/infra/transformers/datasetExternalToolTransformer.ts new file mode 100644 index 00000000..fef953ca --- /dev/null +++ b/src/externalTools/infra/transformers/datasetExternalToolTransformer.ts @@ -0,0 +1,17 @@ +import { AxiosResponse } from 'axios' +import { DatasetExternalToolResolved } from '../../domain/models/ExternalTool' + +export const datasetExternalToolTransformer = ( + response: AxiosResponse<{ + data: { toolUrl: string; displayName: string; datasetId: number; preview: boolean } + }> +): DatasetExternalToolResolved => { + const datasetExtTool = response.data.data + + return { + toolUrlResolved: datasetExtTool.toolUrl, + displayName: datasetExtTool.displayName, + datasetId: datasetExtTool.datasetId, + preview: datasetExtTool.preview + } +} diff --git a/src/externalTools/infra/transformers/externalToolsTransformer.ts b/src/externalTools/infra/transformers/externalToolsTransformer.ts new file mode 100644 index 00000000..69fbfdb2 --- /dev/null +++ b/src/externalTools/infra/transformers/externalToolsTransformer.ts @@ -0,0 +1,23 @@ +import { AxiosResponse } from 'axios' +import { ExternalTool } from '../../domain/models/ExternalTool' +import { ExternalToolPayload } from './ExternalToolPayload' + +export const externalToolsTransformer = ( + response: AxiosResponse<{ + data: ExternalToolPayload[] + }> +): ExternalTool[] => { + const tools = response.data.data + + return tools.map((tool) => ({ + id: tool.id, + displayName: tool.displayName, + description: tool.description, + types: tool.types, + scope: tool.scope, + contentType: tool.contentType, + toolParameters: tool.toolParameters, + allowedApiCalls: 
tool.allowedApiCalls, + requirements: tool.requirements + })) +} diff --git a/src/externalTools/infra/transformers/fileExternalToolTransformer.ts b/src/externalTools/infra/transformers/fileExternalToolTransformer.ts new file mode 100644 index 00000000..f6305fb0 --- /dev/null +++ b/src/externalTools/infra/transformers/fileExternalToolTransformer.ts @@ -0,0 +1,17 @@ +import { AxiosResponse } from 'axios' +import { FileExternalToolResolved } from '../../domain/models/ExternalTool' + +export const fileExternalToolTransformer = ( + response: AxiosResponse<{ + data: { toolUrl: string; displayName: string; fileId: number; preview: boolean } + }> +): FileExternalToolResolved => { + const fileExtTool = response.data.data + + return { + toolUrlResolved: fileExtTool.toolUrl, + displayName: fileExtTool.displayName, + fileId: fileExtTool.fileId, + preview: fileExtTool.preview + } +} diff --git a/src/files/domain/useCases/UpdateFileCategories.ts b/src/files/domain/useCases/UpdateFileCategories.ts index ea568073..fc2e496d 100644 --- a/src/files/domain/useCases/UpdateFileCategories.ts +++ b/src/files/domain/useCases/UpdateFileCategories.ts @@ -10,7 +10,7 @@ export class UpdateFileCategories implements UseCase { /** * Updates the categories for a particular File. - * More detailed information about updating a file's categories behavior can be found in https://guides.dataverse.org/en/latest/api/native-api.html#updating-file-metadata + * More detailed information about updating a file's categories behavior can be found in https://guides.dataverse.org/en/latest/api/native-api.html#updating-file-metadata-categories * * @param {number | string} [fileId] - The file identifier, which can be a string (for persistent identifiers), or a number (for numeric identifiers). * @param {string[]} [categories] - The categories to be added to the file. 
diff --git a/src/files/domain/useCases/UpdateFileTabularTags.ts b/src/files/domain/useCases/UpdateFileTabularTags.ts index 3b777a97..2f6eb543 100644 --- a/src/files/domain/useCases/UpdateFileTabularTags.ts +++ b/src/files/domain/useCases/UpdateFileTabularTags.ts @@ -10,7 +10,7 @@ export class UpdateFileTabularTags implements UseCase<void> { /** * Updates the tabular tabular Tags for a particular File. - * More detailed information about updating a file's tabularTags behavior can be found in https://guides.dataverse.org/en/latest/api/native-api.html#updating-file-metadata + * More detailed information about updating a file's tabularTags behavior can be found in https://guides.dataverse.org/en/latest/api/native-api.html#updating-file-tabular-tags * * @param {number | string} [fileId] - The file identifier, which can be a string (for persistent identifiers), or a number (for numeric identifiers). * @param {string[]} [tabularTags] - The tabular tags to be added to the file. diff --git a/src/index.ts b/src/index.ts index 89a79af6..9e64baa6 100644 --- a/src/index.ts +++ b/src/index.ts @@ -8,3 +8,7 @@ export * from './collections' export * from './metadataBlocks' export * from './files' export * from './contactInfo' +export * from './notifications' +export * from './search' +export * from './licenses' +export * from './externalTools' diff --git a/src/info/domain/models/DatasetMetadataExportFormats.ts b/src/info/domain/models/DatasetMetadataExportFormats.ts new file mode 100644 index 00000000..a749b49d --- /dev/null +++ b/src/info/domain/models/DatasetMetadataExportFormats.ts @@ -0,0 +1,16 @@ +export type DatasetMetadataExportFormats = Record<string, DatasetMetadataExportFormat> + +type DatasetMetadataExportFormat = DatasetMetadataExportFormatBase | XmlDatasetMetadataExportFormat + +interface DatasetMetadataExportFormatBase { + displayName: string + mediaType: string + isHarvestable: boolean + isVisibleInUserInterface: boolean +} + +interface XmlDatasetMetadataExportFormat extends DatasetMetadataExportFormatBase {
+ XMLNameSpace: string + XMLSchemaLocation: string + XMLSchemaVersion: string +} diff --git a/src/info/domain/repositories/IDataverseInfoRepository.ts b/src/info/domain/repositories/IDataverseInfoRepository.ts index 0ec6c747..e0e85644 100644 --- a/src/info/domain/repositories/IDataverseInfoRepository.ts +++ b/src/info/domain/repositories/IDataverseInfoRepository.ts @@ -1,3 +1,4 @@ +import { DatasetMetadataExportFormats } from '../models/DatasetMetadataExportFormats' import { DataverseVersion } from '../models/DataverseVersion' export interface IDataverseInfoRepository { @@ -5,4 +6,5 @@ export interface IDataverseInfoRepository { getZipDownloadLimit(): Promise getMaxEmbargoDurationInMonths(): Promise getApplicationTermsOfUse(lang?: string): Promise + getAvailableDatasetMetadataExportFormats(): Promise } diff --git a/src/info/domain/useCases/GetAvailableDatasetMetadataExportFormats.ts b/src/info/domain/useCases/GetAvailableDatasetMetadataExportFormats.ts new file mode 100644 index 00000000..6faa99dc --- /dev/null +++ b/src/info/domain/useCases/GetAvailableDatasetMetadataExportFormats.ts @@ -0,0 +1,22 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase' +import { DatasetMetadataExportFormats } from '../models/DatasetMetadataExportFormats' +import { IDataverseInfoRepository } from '../repositories/IDataverseInfoRepository' + +export class GetAvailableDatasetMetadataExportFormats + implements UseCase +{ + private dataverseInfoRepository: IDataverseInfoRepository + + constructor(dataverseInfoRepository: IDataverseInfoRepository) { + this.dataverseInfoRepository = dataverseInfoRepository + } + + /** + * Returns a DatasetMetadataExportFormats object containing the available dataset metadata export formats. 
+ * + * @returns {Promise<DatasetMetadataExportFormats>} + */ + async execute(): Promise<DatasetMetadataExportFormats> { + return await this.dataverseInfoRepository.getAvailableDatasetMetadataExportFormats() + } +} diff --git a/src/info/index.ts b/src/info/index.ts index 049cc48d..3837e282 100644 --- a/src/info/index.ts +++ b/src/info/index.ts @@ -3,6 +3,7 @@ import { GetDataverseVersion } from './domain/useCases/GetDataverseVersion' import { GetZipDownloadLimit } from './domain/useCases/GetZipDownloadLimit' import { GetMaxEmbargoDurationInMonths } from './domain/useCases/GetMaxEmbargoDurationInMonths' import { GetApplicationTermsOfUse } from './domain/useCases/GetApplicationTermsOfUse' +import { GetAvailableDatasetMetadataExportFormats } from './domain/useCases/GetAvailableDatasetMetadataExportFormats' const dataverseInfoRepository = new DataverseInfoRepository() @@ -10,10 +11,16 @@ const getDataverseVersion = new GetDataverseVersion(dataverseInfoRepository) const getZipDownloadLimit = new GetZipDownloadLimit(dataverseInfoRepository) const getMaxEmbargoDurationInMonths = new GetMaxEmbargoDurationInMonths(dataverseInfoRepository) const getApplicationTermsOfUse = new GetApplicationTermsOfUse(dataverseInfoRepository) +const getAvailableDatasetMetadataExportFormats = new GetAvailableDatasetMetadataExportFormats( + dataverseInfoRepository +) export { getDataverseVersion, getZipDownloadLimit, getMaxEmbargoDurationInMonths, - getApplicationTermsOfUse + getApplicationTermsOfUse, + getAvailableDatasetMetadataExportFormats } + +export { DatasetMetadataExportFormats } from './domain/models/DatasetMetadataExportFormats' diff --git a/src/info/infra/repositories/DataverseInfoRepository.ts b/src/info/infra/repositories/DataverseInfoRepository.ts index c4de4a22..5e8aa5e0 100644 --- a/src/info/infra/repositories/DataverseInfoRepository.ts +++ b/src/info/infra/repositories/DataverseInfoRepository.ts @@ -2,6 +2,7 @@ import { ApiRepository } from '../../../core/infra/repositories/ApiRepository' import { IDataverseInfoRepository } from
'../../domain/repositories/IDataverseInfoRepository' import { DataverseVersion } from '../../domain/models/DataverseVersion' import { AxiosResponse } from 'axios' +import { DatasetMetadataExportFormats } from '../../domain/models/DatasetMetadataExportFormats' export class DataverseInfoRepository extends ApiRepository implements IDataverseInfoRepository { private readonly infoResourceName: string = 'info' @@ -55,4 +56,14 @@ export class DataverseInfoRepository extends ApiRepository implements IDataverse throw error }) } + + public async getAvailableDatasetMetadataExportFormats(): Promise { + return this.doGet(this.buildApiEndpoint(this.infoResourceName, `exportFormats`)) + .then((response: AxiosResponse<{ data: DatasetMetadataExportFormats }>) => { + return response.data.data + }) + .catch((error) => { + throw error + }) + } } diff --git a/src/licenses/domain/models/License.ts b/src/licenses/domain/models/License.ts new file mode 100644 index 00000000..7f16442e --- /dev/null +++ b/src/licenses/domain/models/License.ts @@ -0,0 +1,14 @@ +export interface License { + id: number + name: string + shortDescription?: string + uri: string + iconUri?: string + active: boolean + isDefault: boolean + sortOrder: number + rightsIdentifier?: string + rightsIdentifierScheme?: string + schemeUri?: string + languageCode?: string +} diff --git a/src/licenses/domain/repositories/ILicensesRepository.ts b/src/licenses/domain/repositories/ILicensesRepository.ts new file mode 100644 index 00000000..45309a89 --- /dev/null +++ b/src/licenses/domain/repositories/ILicensesRepository.ts @@ -0,0 +1,5 @@ +import { License } from '../models/License' + +export interface ILicensesRepository { + getAvailableStandardLicenses(): Promise +} diff --git a/src/licenses/domain/repositories/transformers/LicensePayload.ts b/src/licenses/domain/repositories/transformers/LicensePayload.ts new file mode 100644 index 00000000..a67228da --- /dev/null +++ 
b/src/licenses/domain/repositories/transformers/LicensePayload.ts @@ -0,0 +1,14 @@ +export interface LicensePayload { + id: number + name: string + shortDescription?: string + uri: string + iconUrl?: string + active: boolean + isDefault: boolean + sortOrder: number + rightsIdentifier?: string + rightsIdentifierScheme?: string + schemeUri?: string + languageCode?: string +} diff --git a/src/licenses/domain/repositories/transformers/licenseTransformers.ts b/src/licenses/domain/repositories/transformers/licenseTransformers.ts new file mode 100644 index 00000000..e36347d0 --- /dev/null +++ b/src/licenses/domain/repositories/transformers/licenseTransformers.ts @@ -0,0 +1,24 @@ +import { AxiosResponse } from 'axios' +import { License } from '../../models/License' +import { LicensePayload } from './LicensePayload' + +export const transformPayloadToLicenses = (response: AxiosResponse): License[] => { + const payload = response.data.data as LicensePayload[] + + return payload.map((license: LicensePayload) => transformPayloadLicenseToLicense(license)) +} + +export const transformPayloadLicenseToLicense = (license: LicensePayload): License => ({ + id: license.id, + name: license.name, + shortDescription: license.shortDescription, + uri: license.uri, + iconUri: license.iconUrl, // in payload, it is called iconUrl, but iconUri is the name matching everywhere else + active: license.active, + isDefault: license.isDefault, + sortOrder: license.sortOrder, + rightsIdentifier: license.rightsIdentifier, + rightsIdentifierScheme: license.rightsIdentifierScheme, + schemeUri: license.schemeUri, + languageCode: license.languageCode +}) diff --git a/src/licenses/domain/useCases/GetAvailableStandardLicenses.ts b/src/licenses/domain/useCases/GetAvailableStandardLicenses.ts new file mode 100644 index 00000000..00517770 --- /dev/null +++ b/src/licenses/domain/useCases/GetAvailableStandardLicenses.ts @@ -0,0 +1,20 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase' +import { 
License } from '../models/License' +import { ILicensesRepository } from '../repositories/ILicensesRepository' + +export class GetAvailableStandardLicenses implements UseCase { + private licensesRepository: ILicensesRepository + + constructor(licensesRepository: ILicensesRepository) { + this.licensesRepository = licensesRepository + } + + /** + * Returns the list of available standard license terms that can be selected for a dataset. + * + * @returns {Promise} + */ + async execute(): Promise { + return await this.licensesRepository.getAvailableStandardLicenses() + } +} diff --git a/src/licenses/index.ts b/src/licenses/index.ts new file mode 100644 index 00000000..0d9158f3 --- /dev/null +++ b/src/licenses/index.ts @@ -0,0 +1,10 @@ +import { LicensesRepository } from './infra/repositories/LicensesRepository' +import { GetAvailableStandardLicenses } from './domain/useCases/GetAvailableStandardLicenses' + +const licensesRepository = new LicensesRepository() + +const getAvailableStandardLicenses = new GetAvailableStandardLicenses(licensesRepository) + +export { getAvailableStandardLicenses } + +export { License } from './domain/models/License' diff --git a/src/licenses/infra/repositories/LicensesRepository.ts b/src/licenses/infra/repositories/LicensesRepository.ts new file mode 100644 index 00000000..087d833d --- /dev/null +++ b/src/licenses/infra/repositories/LicensesRepository.ts @@ -0,0 +1,16 @@ +import { ApiRepository } from '../../../core/infra/repositories/ApiRepository' +import { ILicensesRepository } from '../../domain/repositories/ILicensesRepository' +import { License } from '../../domain/models/License' +import { transformPayloadToLicenses } from '../../domain/repositories/transformers/licenseTransformers' + +export class LicensesRepository extends ApiRepository implements ILicensesRepository { + private readonly licensesResourceName: string = 'licenses' + + public async getAvailableStandardLicenses(): Promise { + return 
this.doGet(this.buildApiEndpoint(this.licensesResourceName)) + .then((response) => transformPayloadToLicenses(response)) + .catch((error) => { + throw error + }) + } +} diff --git a/src/metadataBlocks/domain/models/MetadataBlock.ts b/src/metadataBlocks/domain/models/MetadataBlock.ts index b958d68d..b5cd5166 100644 --- a/src/metadataBlocks/domain/models/MetadataBlock.ts +++ b/src/metadataBlocks/domain/models/MetadataBlock.ts @@ -20,6 +20,7 @@ export interface MetadataFieldInfo { displayFormat: string childMetadataFields?: Record isRequired: boolean + isAdvancedSearchFieldType: boolean displayOrder: number displayOnCreate: boolean } diff --git a/src/metadataBlocks/domain/repositories/IMetadataBlocksRepository.ts b/src/metadataBlocks/domain/repositories/IMetadataBlocksRepository.ts index dd7a802d..7acc63db 100644 --- a/src/metadataBlocks/domain/repositories/IMetadataBlocksRepository.ts +++ b/src/metadataBlocks/domain/repositories/IMetadataBlocksRepository.ts @@ -5,7 +5,8 @@ export interface IMetadataBlocksRepository { getCollectionMetadataBlocks( collectionIdOrAlias: number | string, - onlyDisplayedOnCreate: boolean + onlyDisplayedOnCreate: boolean, + datasetType?: string ): Promise getAllMetadataBlocks(): Promise diff --git a/src/metadataBlocks/domain/useCases/GetCollectionMetadataBlocks.ts b/src/metadataBlocks/domain/useCases/GetCollectionMetadataBlocks.ts index c953c16a..1f71be22 100644 --- a/src/metadataBlocks/domain/useCases/GetCollectionMetadataBlocks.ts +++ b/src/metadataBlocks/domain/useCases/GetCollectionMetadataBlocks.ts @@ -16,15 +16,18 @@ export class GetCollectionMetadataBlocks implements UseCase { * @param {number | string} [collectionIdOrAlias = ':root'] - A generic collection identifier, which can be either a string (for queries by CollectionAlias), or a number (for queries by CollectionId) * If this parameter is not set, the default value is: ':root' * @param {boolean} [onlyDisplayedOnCreate=false] - Indicates whether or not to return only the 
metadata blocks that are displayed on dataset creation. The default value is false. + * @param {string} [datasetType] - The name of the dataset type. If provided, additional fields from metadata blocks linked to this dataset type will be returned. * @returns {Promise} */ async execute( collectionIdOrAlias: number | string = ROOT_COLLECTION_ID, - onlyDisplayedOnCreate = false + onlyDisplayedOnCreate = false, + datasetType?: string ): Promise { return await this.metadataBlocksRepository.getCollectionMetadataBlocks( collectionIdOrAlias, - onlyDisplayedOnCreate + onlyDisplayedOnCreate, + datasetType ) } } diff --git a/src/metadataBlocks/infra/repositories/MetadataBlocksRepository.ts b/src/metadataBlocks/infra/repositories/MetadataBlocksRepository.ts index ab308240..6b4f0287 100644 --- a/src/metadataBlocks/infra/repositories/MetadataBlocksRepository.ts +++ b/src/metadataBlocks/infra/repositories/MetadataBlocksRepository.ts @@ -17,11 +17,13 @@ export class MetadataBlocksRepository extends ApiRepository implements IMetadata public async getCollectionMetadataBlocks( collectionIdOrAlias: string | number, - onlyDisplayedOnCreate: boolean + onlyDisplayedOnCreate: boolean, + datasetType?: string ): Promise { return this.doGet(`/dataverses/${collectionIdOrAlias}/metadatablocks`, true, { onlyDisplayedOnCreate: onlyDisplayedOnCreate, - returnDatasetFieldTypes: true + returnDatasetFieldTypes: true, + datasetType: datasetType }) .then((response) => transformMetadataBlocksResponseToMetadataBlocks(response)) .catch((error) => { diff --git a/src/metadataBlocks/infra/repositories/transformers/MetadataFieldInfoPayload.ts b/src/metadataBlocks/infra/repositories/transformers/MetadataFieldInfoPayload.ts index a380a4b7..a3cf6446 100644 --- a/src/metadataBlocks/infra/repositories/transformers/MetadataFieldInfoPayload.ts +++ b/src/metadataBlocks/infra/repositories/transformers/MetadataFieldInfoPayload.ts @@ -12,6 +12,7 @@ export interface MetadataFieldInfoPayload { displayFormat: string 
displayOrder: number isRequired: boolean + isAdvancedSearchFieldType: boolean controlledVocabularyValues?: string[] childMetadataFields?: Record } diff --git a/src/metadataBlocks/infra/repositories/transformers/metadataBlockTransformers.ts b/src/metadataBlocks/infra/repositories/transformers/metadataBlockTransformers.ts index aae50c18..478a472b 100644 --- a/src/metadataBlocks/infra/repositories/transformers/metadataBlockTransformers.ts +++ b/src/metadataBlocks/infra/repositories/transformers/metadataBlockTransformers.ts @@ -98,6 +98,7 @@ const transformPayloadMetadataFieldInfo = ( }), displayFormat: metadataFieldInfoPayload.displayFormat, isRequired: metadataFieldInfoPayload.isRequired, + isAdvancedSearchFieldType: metadataFieldInfoPayload.isAdvancedSearchFieldType, displayOrder: metadataFieldInfoPayload.displayOrder, typeClass: metadataFieldInfoPayload.typeClass as MetadataFieldTypeClass, displayOnCreate: metadataFieldInfoPayload.displayOnCreate diff --git a/src/notifications/domain/models/Notification.ts b/src/notifications/domain/models/Notification.ts new file mode 100644 index 00000000..001d0933 --- /dev/null +++ b/src/notifications/domain/models/Notification.ts @@ -0,0 +1,72 @@ +export enum NotificationType { + ASSIGNROLE = 'ASSIGNROLE', + REVOKEROLE = 'REVOKEROLE', + CREATEDV = 'CREATEDV', + CREATEDS = 'CREATEDS', + CREATEACC = 'CREATEACC', + SUBMITTEDDS = 'SUBMITTEDDS', + RETURNEDDS = 'RETURNEDDS', + PUBLISHEDDS = 'PUBLISHEDDS', + REQUESTFILEACCESS = 'REQUESTFILEACCESS', + GRANTFILEACCESS = 'GRANTFILEACCESS', + REJECTFILEACCESS = 'REJECTFILEACCESS', + FILESYSTEMIMPORT = 'FILESYSTEMIMPORT', + CHECKSUMIMPORT = 'CHECKSUMIMPORT', + CHECKSUMFAIL = 'CHECKSUMFAIL', + CONFIRMEMAIL = 'CONFIRMEMAIL', + APIGENERATED = 'APIGENERATED', + INGESTCOMPLETED = 'INGESTCOMPLETED', + INGESTCOMPLETEDWITHERRORS = 'INGESTCOMPLETEDWITHERRORS', + PUBLISHFAILED_PIDREG = 'PUBLISHFAILED_PIDREG', + WORKFLOW_SUCCESS = 'WORKFLOW_SUCCESS', + WORKFLOW_FAILURE = 'WORKFLOW_FAILURE', + 
STATUSUPDATED = 'STATUSUPDATED', + DATASETCREATED = 'DATASETCREATED', + DATASETMENTIONED = 'DATASETMENTIONED', + GLOBUSUPLOADCOMPLETED = 'GLOBUSUPLOADCOMPLETED', + GLOBUSUPLOADCOMPLETEDWITHERRORS = 'GLOBUSUPLOADCOMPLETEDWITHERRORS', + GLOBUSDOWNLOADCOMPLETED = 'GLOBUSDOWNLOADCOMPLETED', + GLOBUSDOWNLOADCOMPLETEDWITHERRORS = 'GLOBUSDOWNLOADCOMPLETEDWITHERRORS', + REQUESTEDFILEACCESS = 'REQUESTEDFILEACCESS', + GLOBUSUPLOADREMOTEFAILURE = 'GLOBUSUPLOADREMOTEFAILURE', + GLOBUSUPLOADLOCALFAILURE = 'GLOBUSUPLOADLOCALFAILURE', + PIDRECONCILED = 'PIDRECONCILED' +} + +export interface RoleAssignment { + id: number + assignee: string + definitionPointId: number + roleId: number + roleName: string + _roleAlias: string +} + +export interface Notification { + id: number + type: NotificationType + subjectText?: string + messageText?: string + sentTimestamp: string + displayAsRead: boolean + installationBrandName?: string + userGuidesBaseUrl?: string + userGuidesVersion?: string + userGuidesSectionPath?: string + roleAssignments?: RoleAssignment[] + collectionAlias?: string + collectionDisplayName?: string + datasetPersistentIdentifier?: string + datasetDisplayName?: string + ownerPersistentIdentifier?: string + ownerAlias?: string + ownerDisplayName?: string + requestorFirstName?: string + requestorLastName?: string + requestorEmail?: string + dataFileId?: number + dataFileDisplayName?: string + currentCurationStatus?: string + additionalInfo?: string + objectDeleted?: boolean +} diff --git a/src/notifications/domain/repositories/INotificationsRepository.ts b/src/notifications/domain/repositories/INotificationsRepository.ts new file mode 100644 index 00000000..9392c543 --- /dev/null +++ b/src/notifications/domain/repositories/INotificationsRepository.ts @@ -0,0 +1,8 @@ +import { Notification } from '../models/Notification' + +export interface INotificationsRepository { + getAllNotificationsByUser(inAppNotificationFormat?: boolean): Promise + deleteNotification(notificationId: 
number): Promise<void> + getUnreadNotificationsCount(): Promise<number> + markNotificationAsRead(notificationId: number): Promise<void> +} diff --git a/src/notifications/domain/useCases/DeleteNotification.ts b/src/notifications/domain/useCases/DeleteNotification.ts new file mode 100644 index 00000000..ed57fc0b --- /dev/null +++ b/src/notifications/domain/useCases/DeleteNotification.ts @@ -0,0 +1,16 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase' +import { INotificationsRepository } from '../repositories/INotificationsRepository' + +/** + * Use case for deleting a specific notification for the current user. + * + * @param notificationId - The ID of the notification to delete. + * @returns {Promise<void>} - A promise that resolves when the notification is deleted. + */ +export class DeleteNotification implements UseCase<void> { + constructor(private readonly notificationsRepository: INotificationsRepository) {} + + async execute(notificationId: number): Promise<void> { + return this.notificationsRepository.deleteNotification(notificationId) + } +} diff --git a/src/notifications/domain/useCases/GetAllNotificationsByUser.ts b/src/notifications/domain/useCases/GetAllNotificationsByUser.ts new file mode 100644 index 00000000..43555ccc --- /dev/null +++ b/src/notifications/domain/useCases/GetAllNotificationsByUser.ts @@ -0,0 +1,19 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase' +import { Notification } from '../models/Notification' +import { INotificationsRepository } from '../repositories/INotificationsRepository' + +export class GetAllNotificationsByUser implements UseCase<Notification[]> { + constructor(private readonly notificationsRepository: INotificationsRepository) {} + + /** + * Use case for retrieving all notifications for the current user. + * + * @param inAppNotificationFormat - Optional parameter to retrieve fields needed for in-app notifications + * @returns {Promise<Notification[]>} - A promise that resolves to an array of Notification instances. 
+ */ + async execute(inAppNotificationFormat?: boolean): Promise { + return (await this.notificationsRepository.getAllNotificationsByUser( + inAppNotificationFormat + )) as Notification[] + } +} diff --git a/src/notifications/domain/useCases/GetUnreadNotificationsCount.ts b/src/notifications/domain/useCases/GetUnreadNotificationsCount.ts new file mode 100644 index 00000000..2e59c55e --- /dev/null +++ b/src/notifications/domain/useCases/GetUnreadNotificationsCount.ts @@ -0,0 +1,19 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase' +import { INotificationsRepository } from '../repositories/INotificationsRepository' + +export class GetUnreadNotificationsCount implements UseCase { + private notificationsRepository: INotificationsRepository + + constructor(notificationsRepository: INotificationsRepository) { + this.notificationsRepository = notificationsRepository + } + + /** + * Use case for retrieving the number of unread notifications for the current user. + * + * @returns {Promise} - A promise that resolves to the number of unread notifications. + */ + async execute(): Promise { + return await this.notificationsRepository.getUnreadNotificationsCount() + } +} diff --git a/src/notifications/domain/useCases/MarkNotificationAsRead.ts b/src/notifications/domain/useCases/MarkNotificationAsRead.ts new file mode 100644 index 00000000..017be28c --- /dev/null +++ b/src/notifications/domain/useCases/MarkNotificationAsRead.ts @@ -0,0 +1,20 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase' +import { INotificationsRepository } from '../repositories/INotificationsRepository' + +export class MarkNotificationAsRead implements UseCase { + private notificationsRepository: INotificationsRepository + + constructor(notificationsRepository: INotificationsRepository) { + this.notificationsRepository = notificationsRepository + } + + /** + * Use case for marking a notification as read. 
+ * + * @param notificationId - The ID of the notification to mark as read. + * @returns {Promise} - A promise that resolves when the notification is marked as read. + */ + async execute(notificationId: number): Promise { + return await this.notificationsRepository.markNotificationAsRead(notificationId) + } +} diff --git a/src/notifications/index.ts b/src/notifications/index.ts new file mode 100644 index 00000000..3075ee90 --- /dev/null +++ b/src/notifications/index.ts @@ -0,0 +1,21 @@ +import { NotificationsRepository } from './infra/repositories/NotificationsRepository' +import { GetAllNotificationsByUser } from './domain/useCases/GetAllNotificationsByUser' +import { DeleteNotification } from './domain/useCases/DeleteNotification' +import { GetUnreadNotificationsCount } from './domain/useCases/GetUnreadNotificationsCount' +import { MarkNotificationAsRead } from './domain/useCases/MarkNotificationAsRead' + +const notificationsRepository = new NotificationsRepository() + +const getAllNotificationsByUser = new GetAllNotificationsByUser(notificationsRepository) +const deleteNotification = new DeleteNotification(notificationsRepository) +const getUnreadNotificationsCount = new GetUnreadNotificationsCount(notificationsRepository) +const markNotificationAsRead = new MarkNotificationAsRead(notificationsRepository) + +export { + getAllNotificationsByUser, + deleteNotification, + getUnreadNotificationsCount, + markNotificationAsRead +} + +export { Notification, NotificationType, RoleAssignment } from './domain/models/Notification' diff --git a/src/notifications/infra/repositories/NotificationsRepository.ts b/src/notifications/infra/repositories/NotificationsRepository.ts new file mode 100644 index 00000000..f310c34a --- /dev/null +++ b/src/notifications/infra/repositories/NotificationsRepository.ts @@ -0,0 +1,57 @@ +import { ApiRepository } from '../../../core/infra/repositories/ApiRepository' +import { INotificationsRepository } from 
'../../domain/repositories/INotificationsRepository' +import { Notification } from '../../domain/models/Notification' +import { NotificationPayload } from '../transformers/NotificationPayload' + +export class NotificationsRepository extends ApiRepository implements INotificationsRepository { + private readonly notificationsResourceName: string = 'notifications' + + public async getAllNotificationsByUser( + inAppNotificationFormat?: boolean + ): Promise { + const queryParams = inAppNotificationFormat ? { inAppNotificationFormat: 'true' } : undefined + return this.doGet( + this.buildApiEndpoint(this.notificationsResourceName, 'all'), + true, + queryParams + ) + .then((response) => { + const notifications = response.data.data.notifications + return notifications.map((notification: NotificationPayload) => { + const { dataverseDisplayName, dataverseAlias, ...restNotification } = notification + return { + ...restNotification, + ...(dataverseDisplayName && { collectionDisplayName: dataverseDisplayName }), + ...(dataverseAlias && { collectionAlias: dataverseAlias }) + } + }) as Notification[] + }) + .catch((error) => { + throw error + }) + } + + public async deleteNotification(notificationId: number): Promise { + return this.doDelete( + this.buildApiEndpoint(this.notificationsResourceName, notificationId.toString()) + ) + .then(() => undefined) + .catch((error) => { + throw error + }) + } + + public async getUnreadNotificationsCount(): Promise { + return this.doGet( + this.buildApiEndpoint(this.notificationsResourceName, 'unreadCount'), + true + ).then((response) => response.data.data.unreadCount as number) + } + + public async markNotificationAsRead(notificationId: number): Promise { + return this.doPut( + this.buildApiEndpoint(this.notificationsResourceName, 'markAsRead', notificationId), + {} + ).then(() => undefined) + } +} diff --git a/src/notifications/infra/transformers/NotificationPayload.ts b/src/notifications/infra/transformers/NotificationPayload.ts new file 
mode 100644 index 00000000..96d381ac --- /dev/null +++ b/src/notifications/infra/transformers/NotificationPayload.ts @@ -0,0 +1,30 @@ +import { RoleAssignment } from '../../domain/models/Notification' + +export interface NotificationPayload { + id: number + type: string + subjectText?: string + messageText?: string + sentTimestamp: string + displayAsRead: boolean + installationBrandName?: string + userGuidesBaseUrl?: string + userGuidesVersion?: string + userGuidesSectionPath?: string + roleAssignments?: RoleAssignment[] + dataverseAlias?: string + dataverseDisplayName?: string + datasetPersistentIdentifier?: string + datasetDisplayName?: string + ownerPersistentIdentifier?: string + ownerAlias?: string + ownerDisplayName?: string + requestorFirstName?: string + requestorLastName?: string + requestorEmail?: string + dataFileId?: number + dataFileDisplayName?: string + currentCurationStatus?: string + additionalInfo?: string + objectDeleted?: boolean +} diff --git a/src/search/domain/models/SearchService.ts b/src/search/domain/models/SearchService.ts new file mode 100644 index 00000000..b895878e --- /dev/null +++ b/src/search/domain/models/SearchService.ts @@ -0,0 +1,4 @@ +export interface SearchService { + name: string + displayName: string +} diff --git a/src/search/domain/repositories/ISearchServicesRepository.ts b/src/search/domain/repositories/ISearchServicesRepository.ts new file mode 100644 index 00000000..f41a477a --- /dev/null +++ b/src/search/domain/repositories/ISearchServicesRepository.ts @@ -0,0 +1,5 @@ +import { SearchService } from '../models/SearchService' + +export interface ISearchServicesRepository { + getSearchServices(): Promise<SearchService[]> +} diff --git a/src/search/domain/useCases/GetSearchServices.ts b/src/search/domain/useCases/GetSearchServices.ts new file mode 100644 index 00000000..9fa0a0c0 --- /dev/null +++ b/src/search/domain/useCases/GetSearchServices.ts @@ -0,0 +1,20 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase' +import { 
SearchService } from '../models/SearchService' +import { ISearchServicesRepository } from '../repositories/ISearchServicesRepository' + +export class GetSearchServices implements UseCase<SearchService[]> { + private searchServicesRepository: ISearchServicesRepository + + constructor(searchServicesRepository: ISearchServicesRepository) { + this.searchServicesRepository = searchServicesRepository + } + + /** + * Returns all search services available in the installation. + * + * @returns {Promise<SearchService[]>} + */ + async execute(): Promise<SearchService[]> { + return await this.searchServicesRepository.getSearchServices() + } +} diff --git a/src/search/index.ts b/src/search/index.ts new file mode 100644 index 00000000..56735ae0 --- /dev/null +++ b/src/search/index.ts @@ -0,0 +1,10 @@ +import { GetSearchServices } from './domain/useCases/GetSearchServices' +import { SearchServicesRepository } from './infra/repositories/SearchServicesRepository' + +const searchServicesRepository = new SearchServicesRepository() + +const getSearchServices = new GetSearchServices(searchServicesRepository) + +export { getSearchServices } + +export { SearchService } from './domain/models/SearchService' diff --git a/src/search/infra/repositories/SearchServicesRepository.ts b/src/search/infra/repositories/SearchServicesRepository.ts new file mode 100644 index 00000000..343fb686 --- /dev/null +++ b/src/search/infra/repositories/SearchServicesRepository.ts @@ -0,0 +1,14 @@ +import { ApiRepository } from '../../../core/infra/repositories/ApiRepository' +import { SearchService } from '../../domain/models/SearchService' +import { ISearchServicesRepository } from '../../domain/repositories/ISearchServicesRepository' +import { transformSearchServicesResponseToSearchServices } from './transformers/searchServiceTransformers' + +export class SearchServicesRepository extends ApiRepository implements ISearchServicesRepository { + public async getSearchServices(): Promise<SearchService[]> { + return this.doGet(`/search/services`) + .then((response) => 
transformSearchServicesResponseToSearchServices(response)) + .catch((error) => { + throw error + }) + } +} diff --git a/src/search/infra/repositories/transformers/SearchServicePayload.ts b/src/search/infra/repositories/transformers/SearchServicePayload.ts new file mode 100644 index 00000000..b5b45fc4 --- /dev/null +++ b/src/search/infra/repositories/transformers/SearchServicePayload.ts @@ -0,0 +1,4 @@ +export interface SearchServicePayload { + name: string + displayName: string +} diff --git a/src/search/infra/repositories/transformers/searchServiceTransformers.ts b/src/search/infra/repositories/transformers/searchServiceTransformers.ts new file mode 100644 index 00000000..2d379e7c --- /dev/null +++ b/src/search/infra/repositories/transformers/searchServiceTransformers.ts @@ -0,0 +1,24 @@ +import { AxiosResponse } from 'axios' +import { SearchService } from '../../../domain/models/SearchService' +import { SearchServicePayload } from './SearchServicePayload' + +export const transformSearchServicesResponseToSearchServices = ( + response: AxiosResponse +): SearchService[] => { + const searchServicesPayload = response.data.data.services + const searchServices: SearchService[] = [] + searchServicesPayload.forEach(function (searchServicePayload: SearchServicePayload) { + searchServices.push(transformSearchServicePayloadToSearchService(searchServicePayload)) + }) + + return searchServices +} + +const transformSearchServicePayloadToSearchService = ( + searchServicePayload: SearchServicePayload +): SearchService => { + return { + name: searchServicePayload.name, + displayName: searchServicePayload.displayName + } +} diff --git a/test/functional/collections/LinkCollection.test.ts b/test/functional/collections/LinkCollection.test.ts new file mode 100644 index 00000000..eff7550d --- /dev/null +++ b/test/functional/collections/LinkCollection.test.ts @@ -0,0 +1,71 @@ +import { + ApiConfig, + WriteError, + createCollection, + getCollection, + linkCollection, + deleteCollection, + 
getCollectionItems +} from '../../../src' +import { TestConstants } from '../../testHelpers/TestConstants' +import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig' +import { createCollectionDTO } from '../../testHelpers/collections/collectionHelper' + +describe('execute', () => { + const firstCollectionAlias = 'linkCollection-functional-test-first' + const secondCollectionAlias = 'linkCollection-functional-test-second' + let firstCollectionId: number + let secondCollectionId: number + beforeEach(async () => { + ApiConfig.init( + TestConstants.TEST_API_URL, + DataverseApiAuthMechanism.API_KEY, + process.env.TEST_API_KEY + ) + const firstCollection = createCollectionDTO(firstCollectionAlias) + const secondCollection = createCollectionDTO(secondCollectionAlias) + firstCollectionId = await createCollection.execute(firstCollection) + secondCollectionId = await createCollection.execute(secondCollection) + }) + + afterEach(async () => { + await Promise.all([ + deleteCollection.execute(firstCollectionId), + deleteCollection.execute(secondCollectionId) + ]) + }) + + test('should successfully link two collections', async () => { + expect.assertions(1) + try { + await linkCollection.execute(secondCollectionAlias, firstCollectionAlias) + } catch (error) { + throw new Error('Collections should be linked successfully') + } finally { + // Wait for the linking to be processed by Solr + await new Promise((resolve) => setTimeout(resolve, 5000)) + const collectionItemSubset = await getCollectionItems.execute(firstCollectionAlias) + + expect(collectionItemSubset.items.length).toBe(1) + } + }) + + test('should throw an error when linking a non-existent collection', async () => { + const invalidCollectionId = 99999 + const firstCollection = await getCollection.execute(firstCollectionAlias) + + expect.assertions(2) + let writeError: WriteError | undefined = undefined + try { + await linkCollection.execute(invalidCollectionId, firstCollection.id) + throw 
new Error('Use case should throw an error') + } catch (error) { + writeError = error as WriteError + } finally { + expect(writeError).toBeInstanceOf(WriteError) + expect(writeError?.message).toEqual( + `There was an error when writing the resource. Reason was: [404] Can't find dataverse with identifier='${invalidCollectionId}'` + ) + } + }) +}) diff --git a/test/functional/collections/UnlinkCollection.test.ts b/test/functional/collections/UnlinkCollection.test.ts new file mode 100644 index 00000000..0b20b455 --- /dev/null +++ b/test/functional/collections/UnlinkCollection.test.ts @@ -0,0 +1,71 @@ +import { + ApiConfig, + WriteError, + createCollection, + linkCollection, + deleteCollection, + getCollectionItems, + unlinkCollection +} from '../../../src' +import { TestConstants } from '../../testHelpers/TestConstants' +import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig' +import { createCollectionDTO } from '../../testHelpers/collections/collectionHelper' + +describe('execute', () => { + const firstCollectionAlias = 'unlinkCollection-functional-test-first' + const secondCollectionAlias = 'unlinkCollection-functional-test-second' + + let firstCollectionId: number + let secondCollectionId: number + beforeEach(async () => { + ApiConfig.init( + TestConstants.TEST_API_URL, + DataverseApiAuthMechanism.API_KEY, + process.env.TEST_API_KEY + ) + const firstCollectionDTO = createCollectionDTO(firstCollectionAlias) + const secondCollectionDTO = createCollectionDTO(secondCollectionAlias) + firstCollectionId = await createCollection.execute(firstCollectionDTO) + secondCollectionId = await createCollection.execute(secondCollectionDTO) + await linkCollection.execute(secondCollectionAlias, firstCollectionAlias) + // Give enough time to Solr for indexing + await new Promise((resolve) => setTimeout(resolve, 5000)) + }) + + afterEach(async () => { + await Promise.all([ + deleteCollection.execute(firstCollectionId), + 
deleteCollection.execute(secondCollectionId) + ]) + }) + + test('should successfully unlink two collections', async () => { + // Verify that the collections are linked + const collectionItemSubset = await getCollectionItems.execute(firstCollectionAlias) + expect(collectionItemSubset.items.length).toBe(1) + + await unlinkCollection.execute(secondCollectionAlias, firstCollectionAlias) + // Wait for the unlinking to be processed by Solr + await new Promise((resolve) => setTimeout(resolve, 5000)) + const collectionItemSubset2 = await getCollectionItems.execute(firstCollectionAlias) + expect(collectionItemSubset2.items.length).toBe(0) + }) + + test('should throw an error when unlinking a non-existent collection', async () => { + const invalidCollectionId = 99999 + + expect.assertions(2) + let writeError: WriteError | undefined = undefined + try { + await unlinkCollection.execute(invalidCollectionId, firstCollectionId) + throw new Error('Use case should throw an error') + } catch (error) { + writeError = error as WriteError + } finally { + expect(writeError).toBeInstanceOf(WriteError) + expect(writeError?.message).toEqual( + `There was an error when writing the resource. 
Reason was: [404] Can't find dataverse with identifier='${invalidCollectionId}'` + ) + } + }) +}) diff --git a/test/functional/datasets/AddDatasetType.test.ts b/test/functional/datasets/AddDatasetType.test.ts new file mode 100644 index 00000000..409c18b9 --- /dev/null +++ b/test/functional/datasets/AddDatasetType.test.ts @@ -0,0 +1,28 @@ +import { ApiConfig, DatasetType, addDatasetType, deleteDatasetType } from '../../../src' +import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig' +import { TestConstants } from '../../testHelpers/TestConstants' + +describe('AddDatasetType', () => { + describe('execute', () => { + beforeAll(async () => { + ApiConfig.init( + TestConstants.TEST_API_URL, + DataverseApiAuthMechanism.API_KEY, + process.env.TEST_API_KEY + ) + }) + + test('should allow for adding and deleting a dataset type', async () => { + const randomName = `datasetType-${crypto.randomUUID().slice(0, 6)}` + const actual: DatasetType = await addDatasetType.execute({ + name: randomName, + linkedMetadataBlocks: [], + availableLicenses: [] + }) + expect(actual.name).toEqual(randomName) + + const deleted: void = await deleteDatasetType.execute(actual.id as number) + expect(deleted).toEqual({ message: 'deleted' }) + }) + }) +}) diff --git a/test/functional/datasets/CreateDataset.test.ts b/test/functional/datasets/CreateDataset.test.ts index f90eff00..1394a3a4 100644 --- a/test/functional/datasets/CreateDataset.test.ts +++ b/test/functional/datasets/CreateDataset.test.ts @@ -1,5 +1,5 @@ import { createDataset, DatasetDTO } from '../../../src/datasets' -import { ApiConfig } from '../../../src' +import { ApiConfig, WriteError } from '../../../src' import { TestConstants } from '../../testHelpers/TestConstants' import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig' import { FieldValidationError } from '../../../src/datasets/domain/useCases/validators/errors/FieldValidationError' @@ -61,6 +61,58 @@ 
describe('execute', () => { } }) + test('should successfully create a new dataset when a valid dataset type is sent', async () => { + const testNewDataset = { + metadataBlockValues: [ + { + name: 'citation', + fields: { + title: 'Dataset created using the createDataset use case', + author: [ + { + authorName: 'Admin, Dataverse', + authorAffiliation: 'Dataverse.org' + }, + { + authorName: 'Owner, Dataverse', + authorAffiliation: 'Dataversedemo.org' + } + ], + datasetContact: [ + { + datasetContactEmail: 'finch@mailinator.com', + datasetContactName: 'Finch, Fiona' + } + ], + dsDescription: [ + { + dsDescriptionValue: 'This is the description of the dataset.' + } + ], + subject: ['Medicine, Health and Life Sciences'] + } + } + ] + } + expect.assertions(3) + + try { + const defaultDatasetType = 'dataset' + const createdDatasetIdentifiers = await createDataset.execute( + testNewDataset, + ':root', + defaultDatasetType + ) + + expect(createdDatasetIdentifiers).not.toBeNull() + expect(createdDatasetIdentifiers.numericId).not.toBeNull() + expect(createdDatasetIdentifiers.persistentId).not.toBeNull() + await deleteUnpublishedDatasetViaApi(createdDatasetIdentifiers.numericId) + } catch (error) { + throw new Error('Dataset should be created') + } + }) + test('should throw an error when a first level required field is missing', async () => { const testNewDataset = { metadataBlockValues: [ @@ -213,4 +265,52 @@ describe('execute', () => { ) } }) + + test('should throw an error when an invalid dataset type is sent', async () => { + const testNewDataset = { + metadataBlockValues: [ + { + name: 'citation', + fields: { + title: 'Dataset created using the createDataset use case', + author: [ + { + authorName: 'Admin, Dataverse', + authorAffiliation: 'Dataverse.org' + }, + { + authorName: 'Owner, Dataverse', + authorAffiliation: 'Dataversedemo.org' + } + ], + datasetContact: [ + { + datasetContactEmail: 'finch@mailinator.com', + datasetContactName: 'Finch, Fiona' + } + ], + 
dsDescription: [ + { + dsDescriptionValue: 'This is the description of the dataset.' + } + ], + subject: ['Medicine, Health and Life Sciences'] + } + } + ] + } + expect.assertions(1) + let writeError: WriteError | undefined = undefined + try { + const invalidDatasetType = 'doesNotExist' + await createDataset.execute(testNewDataset, ':root', invalidDatasetType) + throw new Error('Use case should throw an error') + } catch (error) { + writeError = error as WriteError + } finally { + expect(writeError?.message).toEqual( + 'There was an error when writing the resource. Reason was: [400] Error parsing Json: Invalid dataset type: doesNotExist' + ) + } + }) }) diff --git a/test/functional/datasets/DeleteDatasetType.test.ts b/test/functional/datasets/DeleteDatasetType.test.ts new file mode 100644 index 00000000..8f447822 --- /dev/null +++ b/test/functional/datasets/DeleteDatasetType.test.ts @@ -0,0 +1,28 @@ +import { ApiConfig, DatasetType, addDatasetType, deleteDatasetType } from '../../../src' +import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig' +import { TestConstants } from '../../testHelpers/TestConstants' + +describe('DeleteDatasetType', () => { + describe('execute', () => { + beforeAll(async () => { + ApiConfig.init( + TestConstants.TEST_API_URL, + DataverseApiAuthMechanism.API_KEY, + process.env.TEST_API_KEY + ) + }) + + test('should allow for adding and deleting a dataset type', async () => { + const randomName = `datasetType-${crypto.randomUUID().slice(0, 6)}` + const actual: DatasetType = await addDatasetType.execute({ + name: randomName, + linkedMetadataBlocks: [], + availableLicenses: [] + }) + expect(actual.name).toEqual(randomName) + + const deleted: void = await deleteDatasetType.execute(actual.id as number) + expect(deleted).toEqual({ message: 'deleted' }) + }) + }) +}) diff --git a/test/functional/datasets/GetDatasetAvailableCategories.test.ts b/test/functional/datasets/GetDatasetAvailableCategories.test.ts new file 
mode 100644 index 00000000..39c8cef2 --- /dev/null +++ b/test/functional/datasets/GetDatasetAvailableCategories.test.ts @@ -0,0 +1,37 @@ +import { ApiConfig, createDataset, getDatasetAvailableCategories, ReadError } from '../../../src' +import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig' +import { deleteUnpublishedDatasetViaApi } from '../../testHelpers/datasets/datasetHelper' +import { CreatedDatasetIdentifiers } from '../../../src/datasets/domain/models/CreatedDatasetIdentifiers' +import { TestConstants } from '../../testHelpers/TestConstants' + +describe('execute', () => { + let createdDatasetIdentifiers: CreatedDatasetIdentifiers + beforeEach(async () => { + ApiConfig.init( + TestConstants.TEST_API_URL, + DataverseApiAuthMechanism.API_KEY, + process.env.TEST_API_KEY + ) + createdDatasetIdentifiers = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) + }) + + afterEach(async () => { + deleteUnpublishedDatasetViaApi(createdDatasetIdentifiers.numericId) + }) + + it('should return categories array when a dataset has files categories', async () => { + const defaultCategories = ['Code', 'Data', 'Documentation'] + const categoriesList = await getDatasetAvailableCategories.execute( + createdDatasetIdentifiers.numericId + ) + expect(categoriesList.sort()).toEqual(defaultCategories.sort()) + }) + + it('should return error when dataset does not exist', async () => { + const nonExistentDatasetId = 99999 + + await expect( + getDatasetAvailableCategories.execute(nonExistentDatasetId) + ).rejects.toBeInstanceOf(ReadError) + }) +}) diff --git a/test/functional/datasets/GetDatasetAvailableDatasetType.test.ts b/test/functional/datasets/GetDatasetAvailableDatasetType.test.ts new file mode 100644 index 00000000..3d80f35a --- /dev/null +++ b/test/functional/datasets/GetDatasetAvailableDatasetType.test.ts @@ -0,0 +1,30 @@ +import { ApiConfig, DatasetType, getDatasetAvailableDatasetType } from '../../../src' +import { 
DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig' +import { TestConstants } from '../../testHelpers/TestConstants' + +describe('getDatasetAvailableDatasetType', () => { + describe('execute', () => { + beforeAll(async () => { + ApiConfig.init( + TestConstants.TEST_API_URL, + DataverseApiAuthMechanism.API_KEY, + process.env.TEST_API_KEY + ) + }) + + test('should return the default available dataset type', async () => { + const defaultDatasetType = 'dataset' + const actualDatasetType: DatasetType = await getDatasetAvailableDatasetType.execute( + defaultDatasetType + ) + const expectedDatasetType = { + id: 1, + name: 'dataset', + linkedMetadataBlocks: [], + availableLicenses: [] + } + + expect(actualDatasetType).toEqual(expectedDatasetType) + }) + }) +}) diff --git a/test/functional/datasets/GetDatasetAvailableDatasetTypes.test.ts b/test/functional/datasets/GetDatasetAvailableDatasetTypes.test.ts new file mode 100644 index 00000000..14a1a2fd --- /dev/null +++ b/test/functional/datasets/GetDatasetAvailableDatasetTypes.test.ts @@ -0,0 +1,29 @@ +import { ApiConfig, DatasetType, getDatasetAvailableDatasetTypes } from '../../../src' +import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig' +import { TestConstants } from '../../testHelpers/TestConstants' + +describe('getDatasetAvailableDatasetTypes', () => { + describe('execute', () => { + beforeAll(async () => { + ApiConfig.init( + TestConstants.TEST_API_URL, + DataverseApiAuthMechanism.API_KEY, + process.env.TEST_API_KEY + ) + }) + + test('should return available dataset types', async () => { + const actualDatasetTypes: DatasetType[] = await getDatasetAvailableDatasetTypes.execute() + const expectedDatasetTypes = [ + { + id: 1, + name: 'dataset', + linkedMetadataBlocks: [], + availableLicenses: [] + } + ] + + expect(actualDatasetTypes).toEqual(expectedDatasetTypes) + }) + }) +}) diff --git a/test/functional/datasets/GetDatasetLinkedCollections.test.ts 
b/test/functional/datasets/GetDatasetLinkedCollections.test.ts new file mode 100644 index 00000000..564bedfe --- /dev/null +++ b/test/functional/datasets/GetDatasetLinkedCollections.test.ts @@ -0,0 +1,62 @@ +import { + ApiConfig, + createDataset, + getDatasetLinkedCollections, + linkDataset, + ReadError +} from '../../../src' +import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig' +import { + createCollectionViaApi, + deleteCollectionViaApi +} from '../../testHelpers/collections/collectionHelper' +import { deleteUnpublishedDatasetViaApi } from '../../testHelpers/datasets/datasetHelper' +import { TestConstants } from '../../testHelpers/TestConstants' + +describe('execute', () => { + const testCollectionAlias = 'getDatasetLinkedCollectionsFunctionalTestCollection' + beforeEach(async () => { + ApiConfig.init( + TestConstants.TEST_API_URL, + DataverseApiAuthMechanism.API_KEY, + process.env.TEST_API_KEY + ) + }) + + it('should return empty array when no collections are linked', async () => { + const createdDatasetIdentifiers = await createDataset.execute( + TestConstants.TEST_NEW_DATASET_DTO + ) + const linkedCollections = await getDatasetLinkedCollections.execute( + createdDatasetIdentifiers.numericId + ) + expect(linkedCollections.length).toBe(0) + await deleteUnpublishedDatasetViaApi(createdDatasetIdentifiers.numericId) + }) + + it('should return linked collections for a dataset', async () => { + const createdDatasetIdentifiers = await createDataset.execute( + TestConstants.TEST_NEW_DATASET_DTO + ) + await createCollectionViaApi(testCollectionAlias) + + await linkDataset.execute(createdDatasetIdentifiers.numericId, testCollectionAlias) + + const linkedCollections = await getDatasetLinkedCollections.execute( + createdDatasetIdentifiers.numericId + ) + expect(linkedCollections.length).toBe(1) + expect(linkedCollections[0].alias).toBe(testCollectionAlias) + + await deleteUnpublishedDatasetViaApi(createdDatasetIdentifiers.numericId) + 
await deleteCollectionViaApi(testCollectionAlias) + }) + + it('should return error when dataset does not exist', async () => { + const nonExistentDatasetId = 99999 + + await expect(getDatasetLinkedCollections.execute(nonExistentDatasetId)).rejects.toBeInstanceOf( + ReadError + ) + }) +}) diff --git a/test/functional/datasets/LinkDataset.test.ts b/test/functional/datasets/LinkDataset.test.ts new file mode 100644 index 00000000..2d514e4f --- /dev/null +++ b/test/functional/datasets/LinkDataset.test.ts @@ -0,0 +1,59 @@ +import { ApiConfig, createDataset, linkDataset, WriteError } from '../../../src' +import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig' +import { + createCollectionViaApi, + deleteCollectionViaApi +} from '../../testHelpers/collections/collectionHelper' +import { deleteUnpublishedDatasetViaApi } from '../../testHelpers/datasets/datasetHelper' +import { TestConstants } from '../../testHelpers/TestConstants' + +describe('execute', () => { + const testCollectionAlias = 'linkDatasetFunctionalTestCollection' + beforeEach(async () => { + ApiConfig.init( + TestConstants.TEST_API_URL, + DataverseApiAuthMechanism.API_KEY, + process.env.TEST_API_KEY + ) + }) + + it('should link a dataset to another collection', async () => { + const createdDatasetIdentifiers = await createDataset.execute( + TestConstants.TEST_NEW_DATASET_DTO + ) + await createCollectionViaApi(testCollectionAlias) + + const result = await linkDataset.execute( + createdDatasetIdentifiers.numericId, + testCollectionAlias + ) + + expect(result).toBeUndefined() + + await deleteUnpublishedDatasetViaApi(createdDatasetIdentifiers.numericId) + await deleteCollectionViaApi(testCollectionAlias) + }) + + it('should throw an error when trying to link a dataset to a non-existent collection', async () => { + const createdDatasetIdentifiers = await createDataset.execute( + TestConstants.TEST_NEW_DATASET_DTO + ) + const nonExistentCollectionAlias = 'nonExistentCollection' + + 
await expect( + linkDataset.execute(createdDatasetIdentifiers.numericId, nonExistentCollectionAlias) + ).rejects.toBeInstanceOf(WriteError) + + await deleteUnpublishedDatasetViaApi(createdDatasetIdentifiers.numericId) + }) + + it('should throw an error when trying to link a dataset that does not exist', async () => { + await createCollectionViaApi(testCollectionAlias) + const nonExistentDatasetId = 999999 + await expect( + linkDataset.execute(nonExistentDatasetId, testCollectionAlias) + ).rejects.toBeInstanceOf(WriteError) + + await deleteCollectionViaApi(testCollectionAlias) + }) +}) diff --git a/test/functional/datasets/LinkDatasetTypeWithMetadataBlocks.test.ts b/test/functional/datasets/LinkDatasetTypeWithMetadataBlocks.test.ts new file mode 100644 index 00000000..c6f4c3f8 --- /dev/null +++ b/test/functional/datasets/LinkDatasetTypeWithMetadataBlocks.test.ts @@ -0,0 +1,44 @@ +import { + ApiConfig, + DatasetType, + addDatasetType, + deleteDatasetType, + linkDatasetTypeWithMetadataBlocks +} from '../../../src' +import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig' +import { TestConstants } from '../../testHelpers/TestConstants' + +describe('LinkDatasetTypeWithMetadataBlocks', () => { + describe('execute', () => { + beforeAll(async () => { + ApiConfig.init( + TestConstants.TEST_API_URL, + DataverseApiAuthMechanism.API_KEY, + process.env.TEST_API_KEY + ) + }) + + test('should allow for linking a dataset type to metadata blocks', async () => { + const randomName = `datasetType-${crypto.randomUUID().slice(0, 6)}` + const actual: DatasetType = await addDatasetType.execute({ + name: randomName, + linkedMetadataBlocks: [], + availableLicenses: [] + }) + expect(actual.name).toEqual(randomName) + + const linked: void = await linkDatasetTypeWithMetadataBlocks.execute(actual.id as number, [ + 'geospatial' + ]) + expect(linked).toEqual({ + linkedMetadataBlocks: { + before: [], + after: ['geospatial'] + } + }) + + const deleted: void = 
await deleteDatasetType.execute(actual.id as number) + expect(deleted).toEqual({ message: 'deleted' }) + }) + }) +}) diff --git a/test/functional/datasets/SetAvailableLicensesForDatasetType.test.ts b/test/functional/datasets/SetAvailableLicensesForDatasetType.test.ts new file mode 100644 index 00000000..0c4d6876 --- /dev/null +++ b/test/functional/datasets/SetAvailableLicensesForDatasetType.test.ts @@ -0,0 +1,44 @@ +import { + ApiConfig, + DatasetType, + addDatasetType, + deleteDatasetType, + setAvailableLicensesForDatasetType +} from '../../../src' +import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig' +import { TestConstants } from '../../testHelpers/TestConstants' + +describe('SetAvailableLicensesForDatasetType', () => { + describe('execute', () => { + beforeAll(async () => { + ApiConfig.init( + TestConstants.TEST_API_URL, + DataverseApiAuthMechanism.API_KEY, + process.env.TEST_API_KEY + ) + }) + + test('should allow for setting available licenses for a dataset type', async () => { + const randomName = `datasetType-${crypto.randomUUID().slice(0, 6)}` + const actual: DatasetType = await addDatasetType.execute({ + name: randomName, + linkedMetadataBlocks: [], + availableLicenses: [] + }) + expect(actual.name).toEqual(randomName) + + const linked: void = await setAvailableLicensesForDatasetType.execute(actual.id as number, [ + 'CC BY 4.0' + ]) + expect(linked).toEqual({ + availableLicenses: { + before: [], + after: ['CC BY 4.0'] + } + }) + + const deleted: void = await deleteDatasetType.execute(actual.id as number) + expect(deleted).toEqual({ message: 'deleted' }) + }) + }) +}) diff --git a/test/functional/datasets/UnlinkDataset.test.ts b/test/functional/datasets/UnlinkDataset.test.ts new file mode 100644 index 00000000..c02a1127 --- /dev/null +++ b/test/functional/datasets/UnlinkDataset.test.ts @@ -0,0 +1,51 @@ +import { ApiConfig, createDataset, linkDataset, unlinkDataset, WriteError } from '../../../src' +import { 
DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig' +import { + createCollectionViaApi, + deleteCollectionViaApi +} from '../../testHelpers/collections/collectionHelper' +import { deleteUnpublishedDatasetViaApi } from '../../testHelpers/datasets/datasetHelper' +import { TestConstants } from '../../testHelpers/TestConstants' + +describe('execute', () => { + const testCollectionAlias = 'unlinkDatasetFunctionalTestCollection' + beforeEach(async () => { + ApiConfig.init( + TestConstants.TEST_API_URL, + DataverseApiAuthMechanism.API_KEY, + process.env.TEST_API_KEY + ) + }) + + it('should unlink a dataset from a collection', async () => { + const createdDatasetIdentifiers = await createDataset.execute( + TestConstants.TEST_NEW_DATASET_DTO + ) + await createCollectionViaApi(testCollectionAlias) + + await linkDataset.execute(createdDatasetIdentifiers.numericId, testCollectionAlias) + + const result = await unlinkDataset.execute( + createdDatasetIdentifiers.numericId, + testCollectionAlias + ) + + expect(result).toBeUndefined() + + await deleteUnpublishedDatasetViaApi(createdDatasetIdentifiers.numericId) + await deleteCollectionViaApi(testCollectionAlias) + }) + + it('should throw error when dataset is not linked to the collection', async () => { + const createdDatasetIdentifiers = await createDataset.execute( + TestConstants.TEST_NEW_DATASET_DTO + ) + await createCollectionViaApi(testCollectionAlias) + + await expect( + unlinkDataset.execute(createdDatasetIdentifiers.numericId, testCollectionAlias) + ).rejects.toBeInstanceOf(WriteError) + + await deleteCollectionViaApi(testCollectionAlias) + }) +}) diff --git a/test/functional/licenses/GetAvailableStandardLicenses.test.ts b/test/functional/licenses/GetAvailableStandardLicenses.test.ts new file mode 100644 index 00000000..995551e1 --- /dev/null +++ b/test/functional/licenses/GetAvailableStandardLicenses.test.ts @@ -0,0 +1,51 @@ +import { ApiConfig, getAvailableStandardLicenses, License } from 
'../../../src' +import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig' +import { TestConstants } from '../../testHelpers/TestConstants' + +describe('getAvailableStandardLicenses', () => { + describe('execute', () => { + beforeAll(async () => { + ApiConfig.init( + TestConstants.TEST_API_URL, + DataverseApiAuthMechanism.API_KEY, + process.env.TEST_API_KEY + ) + }) + + test('should return available standard license terms', async () => { + const actualLicenses: License[] = await getAvailableStandardLicenses.execute() + const expectedLicenses: License[] = [ + { + id: 1, + name: 'CC0 1.0', + shortDescription: 'Creative Commons CC0 1.0 Universal Public Domain Dedication.', + uri: 'http://creativecommons.org/publicdomain/zero/1.0', + iconUri: 'https://licensebuttons.net/p/zero/1.0/88x31.png', + active: true, + isDefault: true, + sortOrder: 0, + rightsIdentifier: 'CC0-1.0', + rightsIdentifierScheme: 'SPDX', + schemeUri: 'https://spdx.org/licenses/', + languageCode: 'en' + }, + { + id: 2, + name: 'CC BY 4.0', + shortDescription: 'Creative Commons Attribution 4.0 International License.', + uri: 'http://creativecommons.org/licenses/by/4.0', + iconUri: 'https://licensebuttons.net/l/by/4.0/88x31.png', + active: true, + isDefault: false, + sortOrder: 2, + rightsIdentifier: 'CC-BY-4.0', + rightsIdentifierScheme: 'SPDX', + schemeUri: 'https://spdx.org/licenses/', + languageCode: 'en' + } + ] + + expect(actualLicenses).toEqual(expectedLicenses) + }) + }) +}) diff --git a/test/functional/notifications/DeleteNotification.test.ts b/test/functional/notifications/DeleteNotification.test.ts new file mode 100644 index 00000000..093fa637 --- /dev/null +++ b/test/functional/notifications/DeleteNotification.test.ts @@ -0,0 +1,27 @@ +import { ApiConfig, deleteNotification, getAllNotificationsByUser, WriteError } from '../../../src' +import { TestConstants } from '../../testHelpers/TestConstants' +import { DataverseApiAuthMechanism } from 
'../../../src/core/infra/repositories/ApiConfig' + +describe('execute', () => { + beforeEach(async () => { + ApiConfig.init( + TestConstants.TEST_API_URL, + DataverseApiAuthMechanism.API_KEY, + process.env.TEST_API_KEY + ) + }) + + test('should successfully delete a notification for authenticated user', async () => { + const notifications = await getAllNotificationsByUser.execute() + const notificationId = notifications[notifications.length - 1].id + + await deleteNotification.execute(notificationId) + + const notificationsAfterDelete = await getAllNotificationsByUser.execute() + expect(notificationsAfterDelete.length).toBe(notifications.length - 1) + }) + + test('should throw an error when the notification id does not exist', async () => { + await expect(deleteNotification.execute(123)).rejects.toThrow(WriteError) + }) +}) diff --git a/test/functional/notifications/GetAllNotificationsByUser.test.ts b/test/functional/notifications/GetAllNotificationsByUser.test.ts new file mode 100644 index 00000000..7ccd7ec1 --- /dev/null +++ b/test/functional/notifications/GetAllNotificationsByUser.test.ts @@ -0,0 +1,37 @@ +import { ApiConfig, getAllNotificationsByUser, Notification } from '../../../src' +import { TestConstants } from '../../testHelpers/TestConstants' +import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig' + +describe('execute', () => { + beforeEach(async () => { + ApiConfig.init( + TestConstants.TEST_API_URL, + DataverseApiAuthMechanism.API_KEY, + process.env.TEST_API_KEY + ) + }) + + test('should successfully return notifications for authenticated user', async () => { + const notifications: Notification[] = await getAllNotificationsByUser.execute() + + expect(notifications).not.toBeNull() + expect(Array.isArray(notifications)).toBe(true) + }) + + test('should have correct notification properties if notifications exist', async () => { + const notifications = await getAllNotificationsByUser.execute() + + 
expect(notifications[0]).toHaveProperty('id') + expect(notifications[0]).toHaveProperty('type') + expect(notifications[0]).toHaveProperty('sentTimestamp') + }) + + test('should have correct in-app notification properties when inAppNotificationFormat is true', async () => { + const notifications = await getAllNotificationsByUser.execute(true) + + expect(notifications[0]).toHaveProperty('id') + expect(notifications[0]).toHaveProperty('type') + expect(notifications[0]).toHaveProperty('sentTimestamp') + expect(notifications[0]).toHaveProperty('displayAsRead') + }) +}) diff --git a/test/functional/search/GetSearchServices.test.ts b/test/functional/search/GetSearchServices.test.ts new file mode 100644 index 00000000..af70aae9 --- /dev/null +++ b/test/functional/search/GetSearchServices.test.ts @@ -0,0 +1,23 @@ +import { ApiConfig, getSearchServices } from '../../../src' +import { TestConstants } from '../../testHelpers/TestConstants' +import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig' +import { SearchService } from '../../../src/search/domain/models/SearchService' + +describe('execute', () => { + beforeEach(async () => { + ApiConfig.init( + TestConstants.TEST_API_URL, + DataverseApiAuthMechanism.API_KEY, + process.env.TEST_API_KEY + ) + }) + + test('should successfully return search services', async () => { + const searchServices: SearchService[] = await getSearchServices.execute() + + expect(searchServices).toBeDefined() + expect(searchServices.length).toBe(1) + expect(searchServices[0].name).toBe('solr') + expect(searchServices[0].displayName).toBe('Dataverse Standard Search') + }) +}) diff --git a/test/integration/collections/CollectionsRepository.test.ts b/test/integration/collections/CollectionsRepository.test.ts index d869834b..d1afd76d 100644 --- a/test/integration/collections/CollectionsRepository.test.ts +++ b/test/integration/collections/CollectionsRepository.test.ts @@ -15,7 +15,8 @@ import { createCollection, 
getDatasetFiles, restrictFile, - deleteFile + deleteFile, + linkDataset } from '../../../src' import { ApiConfig } from '../../../src' import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig' @@ -784,6 +785,7 @@ describe('CollectionsRepository', () => { undefined, undefined, undefined, + undefined, true ) expect(actual.countPerObjectType?.collections).toBe(1) @@ -1910,4 +1912,137 @@ describe('CollectionsRepository', () => { ).rejects.toThrow(expectedError) }) }) + describe('linkCollection', () => { + const firstCollectionAlias = 'linkCollectionFirst' + const secondCollectionAlias = 'linkCollectionSecond' + + beforeAll(async () => { + await createCollectionViaApi(firstCollectionAlias) + await createCollectionViaApi(secondCollectionAlias) + }) + + afterAll(async () => { + await deleteCollectionViaApi(firstCollectionAlias) + await deleteCollectionViaApi(secondCollectionAlias) + }) + + test('should link a collection successfully', async () => { + const firstCollection = await sut.getCollection(firstCollectionAlias) + await sut.getCollection(secondCollectionAlias) + + await sut.linkCollection(secondCollectionAlias, firstCollectionAlias) + + await sut.getCollection(secondCollectionAlias) + await new Promise((res) => setTimeout(res, 2000)) + const collectionItemSubset = await sut.getCollectionItems(firstCollection.alias) + expect(collectionItemSubset.items.length).toBe(1) + }) + + test('should throw error when linking a non-existent collection', async () => { + const invalidCollectionId = 99999 + const firstCollection = await sut.getCollection(firstCollectionAlias) + + const expectedError = new WriteError("[404] Can't find dataverse with identifier='99999'") + + await expect(sut.linkCollection(invalidCollectionId, firstCollection.id)).rejects.toThrow( + expectedError + ) + }) + }) + + describe('unlinkCollection', () => { + const firstCollectionAlias = 'unlinkCollectionFirst' + const secondCollectionAlias = 'unlinkCollectionSecond' + + 
beforeAll(async () => { + await createCollectionViaApi(firstCollectionAlias) + await createCollectionViaApi(secondCollectionAlias) + + const firstCollection = await sut.getCollection(firstCollectionAlias) + const secondCollection = await sut.getCollection(secondCollectionAlias) + + await sut.linkCollection(secondCollection.id, firstCollection.id) + }) + + afterAll(async () => { + await deleteCollectionViaApi(firstCollectionAlias) + await deleteCollectionViaApi(secondCollectionAlias) + }) + + test('should unlink a collection successfully', async () => { + const firstCollection = await sut.getCollection(firstCollectionAlias) + const secondCollection = await sut.getCollection(secondCollectionAlias) + + await sut.unlinkCollection(secondCollection.id, firstCollection.id) + await new Promise((res) => setTimeout(res, 2000)) + + await sut.getCollection(secondCollectionAlias) + const collectionItemSubset = await sut.getCollectionItems(firstCollection.alias) + expect(collectionItemSubset.items).toStrictEqual([]) + }) + + test('should throw error when unlinking a non-existent collection', async () => { + const invalidCollectionId = 99999 + const firstCollection = await sut.getCollection(firstCollectionAlias) + + const expectedError = new WriteError("[404] Can't find dataverse with identifier='99999'") + + await expect(sut.unlinkCollection(invalidCollectionId, firstCollection.id)).rejects.toThrow( + expectedError + ) + }) + }) + describe('getCollectionLinks', () => { + const firstCollectionAlias = 'getCollectionLinksFirst' + const secondCollectionAlias = 'getCollectionLinksSecond' + const thirdCollectionAlias = 'getCollectionLinksThird' + const fourthCollectionAlias = 'getCollectionLinksFourth' + let childDatasetNumericId: number + beforeAll(async () => { + await createCollectionViaApi(firstCollectionAlias) + await createCollectionViaApi(secondCollectionAlias) + await createCollectionViaApi(thirdCollectionAlias) + await createCollectionViaApi(fourthCollectionAlias) + const { 
numericId: createdId } = await createDataset.execute( + TestConstants.TEST_NEW_DATASET_DTO, + fourthCollectionAlias + ) + childDatasetNumericId = createdId + await sut.linkCollection(secondCollectionAlias, firstCollectionAlias) + await sut.linkCollection(firstCollectionAlias, thirdCollectionAlias) + await sut.linkCollection(firstCollectionAlias, fourthCollectionAlias) + await linkDataset.execute(childDatasetNumericId, firstCollectionAlias) + }) + + afterAll(async () => { + await deleteUnpublishedDatasetViaApi(childDatasetNumericId) + await deleteCollectionViaApi(firstCollectionAlias) + await deleteCollectionViaApi(secondCollectionAlias) + await deleteCollectionViaApi(thirdCollectionAlias) + await deleteCollectionViaApi(fourthCollectionAlias) + }) + + test('should return collection links successfully', async () => { + const firstCollection = await sut.getCollection(firstCollectionAlias) + const collectionLinks = await sut.getCollectionLinks(firstCollection.id) + + expect(collectionLinks.linkedCollections).toHaveLength(1) + + expect(collectionLinks.linkedCollections[0].alias).toBe(secondCollectionAlias) + expect(collectionLinks.collectionsLinkingToThis).toHaveLength(2) + expect(collectionLinks.collectionsLinkingToThis[0].alias).toBe(thirdCollectionAlias) + expect(collectionLinks.collectionsLinkingToThis[1].alias).toBe(fourthCollectionAlias) + expect(collectionLinks.linkedDatasets).toHaveLength(1) + expect(collectionLinks.linkedDatasets[0].title).toBe( + 'Dataset created using the createDataset use case' + ) + }) + + test('should return error when collection does not exist', async () => { + const invalidCollectionId = 99999 + const expectedError = new ReadError("[404] Can't find dataverse with identifier='99999'") + + await expect(sut.getCollectionLinks(invalidCollectionId)).rejects.toThrow(expectedError) + }) + }) }) diff --git a/test/integration/datasets/DatasetsRepository.test.ts b/test/integration/datasets/DatasetsRepository.test.ts index c5c93dcd..af669e7c 100644 
--- a/test/integration/datasets/DatasetsRepository.test.ts +++ b/test/integration/datasets/DatasetsRepository.test.ts @@ -20,7 +20,14 @@ import { CreatedDatasetIdentifiers, DatasetDTO, DatasetDeaccessionDTO, - publishDataset + publishDataset, + DatasetType, + getDatasetAvailableDatasetTypes, + getDatasetAvailableDatasetType, + addDatasetType, + deleteDatasetType, + linkDatasetTypeWithMetadataBlocks, + setAvailableLicensesForDatasetType } from '../../../src/datasets' import { ApiConfig, WriteError } from '../../../src' import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig' @@ -51,6 +58,11 @@ import { import { FilesRepository } from '../../../src/files/infra/repositories/FilesRepository' import { DirectUploadClient } from '../../../src/files/infra/clients/DirectUploadClient' import { createTestFileUploadDestination } from '../../testHelpers/files/fileUploadDestinationHelper' +import { CitationFormat } from '../../../src/datasets/domain/models/CitationFormat' +import { + createDatasetTemplateViaApi, + deleteDatasetTemplateViaApi +} from '../../testHelpers/datasets/datasetTemplatesHelper' const TEST_DIFF_DATASET_DTO: DatasetDTO = { license: { @@ -98,6 +110,7 @@ describe('DatasetsRepository', () => { const filesRepositorySut = new FilesRepository() const directUploadSut: DirectUploadClient = new DirectUploadClient(filesRepositorySut) + const defaultDatasetType = 'dataset' beforeAll(async () => { ApiConfig.init( @@ -492,6 +505,113 @@ describe('DatasetsRepository', () => { }) }) + describe('getDatasetCitationInOtherFormats', () => { + let testDatasetIds: CreatedDatasetIdentifiers + + beforeAll(async () => { + testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) + }) + + afterAll(async () => { + await deletePublishedDatasetViaApi(testDatasetIds.persistentId) + }) + + test('should return citation in BibTeX format', async () => { + const citation = await sut.getDatasetCitationInOtherFormats( + 
testDatasetIds.numericId, + DatasetNotNumberedVersion.LATEST, + CitationFormat.BibTeX + ) + + expect(typeof citation.content).toBe('string') + expect(citation.contentType).toMatch(/text\/plain/) + }) + + test('should return citation in BibTeX format using persistent id', async () => { + const citation = await sut.getDatasetCitationInOtherFormats( + testDatasetIds.persistentId, + DatasetNotNumberedVersion.LATEST, + CitationFormat.BibTeX + ) + + expect(typeof citation.content).toBe('string') + expect(citation.contentType).toMatch(/text\/plain/) + }) + + test('should return citation in RIS format', async () => { + const citation = await sut.getDatasetCitationInOtherFormats( + testDatasetIds.numericId, + DatasetNotNumberedVersion.LATEST, + CitationFormat.RIS + ) + + expect(typeof citation.content).toBe('string') + expect(citation.contentType).toMatch(/text\/plain/) + }) + + test('should return citation in CSLJson format', async () => { + const citation = await sut.getDatasetCitationInOtherFormats( + testDatasetIds.numericId, + DatasetNotNumberedVersion.LATEST, + CitationFormat.CSLJson + ) + + expect(typeof citation.content).toBe('string') + expect(citation.contentType).toMatch(/application\/json/) + }) + + test('should return citation in EndNote format', async () => { + const citation = await sut.getDatasetCitationInOtherFormats( + testDatasetIds.numericId, + DatasetNotNumberedVersion.LATEST, + CitationFormat.EndNote + ) + + expect(typeof citation.content).toBe('string') + expect(citation.contentType).toMatch(/text\/xml/) + }) + + test('should return citation in Internal format', async () => { + const citation = await sut.getDatasetCitationInOtherFormats( + testDatasetIds.numericId, + DatasetNotNumberedVersion.LATEST, + CitationFormat.Internal + ) + + expect(typeof citation.content).toBe('string') + expect(citation.contentType).toMatch(/text\/html/) + }) + + test('should return error when dataset does not exist', async () => { + const nonExistentId = 9999999 + const 
expectedError = new ReadError(`[404] Dataset with ID ${nonExistentId} not found.`) + + await expect( + sut.getDatasetCitationInOtherFormats( + nonExistentId, + DatasetNotNumberedVersion.LATEST, + CitationFormat.RIS + ) + ).rejects.toThrow(expectedError) + }) + + test('should return citation for deaccessioned dataset when includeDeaccessioned = true', async () => { + await publishDatasetViaApi(testDatasetIds.numericId) + await waitForNoLocks(testDatasetIds.numericId, 10) + await deaccessionDatasetViaApi(testDatasetIds.numericId, '1.0') + + const citation = await sut.getDatasetCitationInOtherFormats( + testDatasetIds.numericId, + DatasetNotNumberedVersion.LATEST, + CitationFormat.RIS, + true + ) + + expect(typeof citation.content).toBe('string') + expect(citation.contentType).toMatch(/text\/plain/) + }) + }) + describe('getDatasetVersionDiff', () => { let testDatasetIds: CreatedDatasetIdentifiers @@ -708,6 +828,64 @@ describe('DatasetsRepository', () => { expect(actualCreatedDataset.metadataBlocks[0].fields.subject).toContain( 'Medicine, Health and Life Sciences' ) + // even though we didn't provide a dataset type, it should be created with the default one + expect(actualCreatedDataset.datasetType).toBe(defaultDatasetType) + }) + }) + + describe('createDatasetWithDatasetType', () => { + test('should create a dataset with the provided dataset type', async () => { + const testNewDataset = { + metadataBlockValues: [ + { + name: 'citation', + fields: { + title: 'Dataset created using the createDataset use case', + author: [ + { + authorName: 'Admin, Dataverse', + authorAffiliation: 'Dataverse.org' + }, + { + authorName: 'Owner, Dataverse', + authorAffiliation: 'Dataversedemo.org' + } + ], + datasetContact: [ + { + datasetContactEmail: 'finch@mailinator.com', + datasetContactName: 'Finch, Fiona' + } + ], + dsDescription: [ + { + dsDescriptionValue: 'This is the description of the dataset.' 
+ } + ], + subject: ['Medicine, Health and Life Sciences'] + } + } + ] + } + + const metadataBlocksRepository = new MetadataBlocksRepository() + const citationMetadataBlock = await metadataBlocksRepository.getMetadataBlockByName( + 'citation' + ) + const createdDataset = await sut.createDataset( + testNewDataset, + [citationMetadataBlock], + ROOT_COLLECTION_ALIAS, + defaultDatasetType + ) + const actualCreatedDataset = await sut.getDataset( + createdDataset.numericId, + DatasetNotNumberedVersion.LATEST, + false, + false + ) + + expect(actualCreatedDataset.datasetType).toBe(defaultDatasetType) }) }) @@ -954,8 +1132,8 @@ describe('DatasetsRepository', () => { } ]) }) - - test('should throw error if trying to update an outdated internal version dataset', async () => { + // TODO: add this test when https://github.com/IQSS/dataverse-client-javascript/issues/343 is fixed + test.skip('should throw error if trying to update an outdated internal version dataset', async () => { const testDataset = { metadataBlockValues: [ { @@ -1388,4 +1566,297 @@ describe('DatasetsRepository', () => { await expect(sut.deleteDatasetDraft(nonExistentTestDatasetId)).rejects.toThrow(expectedError) }) }) + + describe('linkDataset', () => { + let testDatasetIds: CreatedDatasetIdentifiers + const testCollectionAlias = 'testLinkDatasetCollection' + + beforeAll(async () => { + testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) + await createCollectionViaApi(testCollectionAlias) + }) + + afterAll(async () => { + await deletePublishedDatasetViaApi(testDatasetIds.persistentId) + await deleteCollectionViaApi(testCollectionAlias) + }) + + test('should link a dataset to another collection', async () => { + const actual = await sut.linkDataset(testDatasetIds.numericId, testCollectionAlias) + + expect(actual).toBeUndefined() + + const linkedCollections = await sut.getDatasetLinkedCollections(testDatasetIds.numericId) + 
expect(linkedCollections[0].alias).toBe(testCollectionAlias) + }) + + test('should return error when dataset does not exist', async () => { + await expect(sut.linkDataset(nonExistentTestDatasetId, testCollectionAlias)).rejects.toThrow() + }) + + test('should return error when collection does not exist', async () => { + await expect( + sut.linkDataset(testDatasetIds.numericId, 'nonExistentCollectionAlias') + ).rejects.toThrow() + }) + }) + + describe('unlinkDataset', () => { + let testDatasetIds: CreatedDatasetIdentifiers + const testCollectionAlias = 'testUnlinkDatasetCollection' + + beforeAll(async () => { + testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) + await createCollectionViaApi(testCollectionAlias) + }) + + afterAll(async () => { + await deletePublishedDatasetViaApi(testDatasetIds.persistentId) + await deleteCollectionViaApi(testCollectionAlias) + }) + + test('should unlink a dataset from a collection', async () => { + await sut.linkDataset(testDatasetIds.numericId, testCollectionAlias) + const linkedCollections = await sut.getDatasetLinkedCollections(testDatasetIds.numericId) + expect(linkedCollections[0].alias).toBe(testCollectionAlias) + + const actual = await sut.unlinkDataset(testDatasetIds.numericId, testCollectionAlias) + + expect(actual).toBeUndefined() + const updatedLinkedCollections = await sut.getDatasetLinkedCollections( + testDatasetIds.numericId + ) + expect(updatedLinkedCollections.length).toBe(0) + }) + + test('should return error when dataset does not exist', async () => { + await expect(sut.linkDataset(nonExistentTestDatasetId, testCollectionAlias)).rejects.toThrow() + }) + + test('should return error when collection does not exist', async () => { + await expect( + sut.linkDataset(testDatasetIds.numericId, 'nonExistentCollectionAlias') + ).rejects.toThrow() + }) + + test('should return error when dataset is not linked to the collection', async () => { + await expect( + 
sut.unlinkDataset(testDatasetIds.numericId, testCollectionAlias) + ).rejects.toThrow() + }) + }) + + describe('getDatasetLinkedCollections', () => { + let testDatasetIds: CreatedDatasetIdentifiers + const testCollectionAlias = 'testGetLinkedCollections' + + beforeAll(async () => { + testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) + await createCollectionViaApi(testCollectionAlias) + }) + + afterAll(async () => { + await deletePublishedDatasetViaApi(testDatasetIds.persistentId) + await deleteCollectionViaApi(testCollectionAlias) + }) + + test('should return empty array when no collections are linked', async () => { + const linkedCollections = await sut.getDatasetLinkedCollections(testDatasetIds.numericId) + + expect(linkedCollections.length).toBe(0) + }) + + test('should return linked collections for a dataset', async () => { + await sut.linkDataset(testDatasetIds.numericId, testCollectionAlias) + + const linkedCollections = await sut.getDatasetLinkedCollections(testDatasetIds.numericId) + + expect(linkedCollections.length).toBe(1) + expect(linkedCollections[0].alias).toBe(testCollectionAlias) + }) + + test('should return error when dataset does not exist', async () => { + await expect(sut.getDatasetLinkedCollections(nonExistentTestDatasetId)).rejects.toThrow() + }) + }) + + describe('getDatasetAvailableCategories', () => { + let testDatasetIds: CreatedDatasetIdentifiers + + beforeAll(async () => { + testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) + }) + + afterAll(async () => { + await deletePublishedDatasetViaApi(testDatasetIds.persistentId) + }) + + test('should get available categories', async () => { + const fileMetadata = { + description: 'test description', + directoryLabel: 'directoryLabel', + categories: ['category1', 'category2', 'Documentation', 'Data', 'Code'] + } + + await uploadFileViaApi(testDatasetIds.numericId, testTextFile1Name, fileMetadata) + + const actual = await 
sut.getDatasetAvailableCategories(testDatasetIds.numericId) + expect(actual.sort()).toEqual(fileMetadata.categories.sort()) + }) + + test('should get available categorie if dataset id is persistent id', async () => { + const fileMetadata = { + description: 'test description', + directoryLabel: 'directoryLabel', + categories: ['category1', 'category2', 'Documentation', 'Data', 'Code'] + } + + const actual = await sut.getDatasetAvailableCategories(testDatasetIds.persistentId) + expect(actual.sort()).toEqual(fileMetadata.categories.sort()) + }) + + test('should return error when dataset does not exist', async () => { + await expect(sut.getDatasetAvailableCategories(nonExistentTestDatasetId)).rejects.toThrow() + }) + }) + + describe('getDatasetTemplates', () => { + const testCollectionAlias = 'testGetDatasetTemplates' + + beforeAll(async () => { + await createCollectionViaApi(testCollectionAlias) + }) + + afterAll(async () => { + await deleteCollectionViaApi(testCollectionAlias) + }) + + test('should return empty dataset templates', async () => { + const actual = await sut.getDatasetTemplates(testCollectionAlias) + + expect(actual.length).toBe(0) + }) + + test('should return dataset templates for a collection', async () => { + const templateCreated = await createDatasetTemplateViaApi(testCollectionAlias) + + const actual = await sut.getDatasetTemplates(testCollectionAlias) + + expect(actual.length).toBe(1) + + expect(actual[0].name).toBe(templateCreated.name) + expect(actual[0].isDefault).toBe(templateCreated.isDefault) + expect(actual[0].datasetMetadataBlocks.length).toBe(1) + expect(actual[0].datasetMetadataBlocks[0].name).toBe('citation') + expect(actual[0].datasetMetadataBlocks[0].fields.author.length).toBe(1) + expect(actual[0].instructions.length).toBe(templateCreated.instructions.length) + + await deleteDatasetTemplateViaApi(actual[0].id) + }) + }) + + describe('getDatasetAvailableDatasetTypes', () => { + test('should return available dataset types', async () => 
{ + const actualDatasetTypes: DatasetType[] = await getDatasetAvailableDatasetTypes.execute() + const expectedDatasetTypes = [ + { + id: 1, + name: 'dataset', + linkedMetadataBlocks: [], + availableLicenses: [] + } + ] + + expect(actualDatasetTypes).toEqual(expectedDatasetTypes) + }) + }) + + describe('getDatasetAvailableDatasetType', () => { + test('should return available the default dataset type', async () => { + const defaultDatasetType = 'dataset' + const actualDatasetType: DatasetType = await getDatasetAvailableDatasetType.execute( + defaultDatasetType + ) + const expectedDatasetType = { + id: 1, + name: 'dataset', + linkedMetadataBlocks: [], + availableLicenses: [] + } + + expect(actualDatasetType).toEqual(expectedDatasetType) + }) + }) + + describe('addDatasetType', () => { + test('should add a dataset type', async () => { + const randomName = `datasetType-${crypto.randomUUID().slice(0, 6)}` + const actual: DatasetType = await addDatasetType.execute({ + name: randomName, + linkedMetadataBlocks: [], + availableLicenses: [] + }) + + expect(actual.name).toEqual(randomName) + }) + }) + + describe('deleteDatasetType', () => { + test('should delete a dataset type (after adding it)', async () => { + const randomName = `datasetType-${crypto.randomUUID().slice(0, 6)}` + const actual: DatasetType = await addDatasetType.execute({ + name: randomName, + linkedMetadataBlocks: [], + availableLicenses: [] + }) + expect(actual.name).toEqual(randomName) + + const deleted: void = await deleteDatasetType.execute(actual.id as number) + expect(deleted).toEqual({ message: 'deleted' }) + }) + }) + + describe('linkDatasetTypeWithMetadataBlocks', () => { + test('should allow for linking a dataset type to metadata blocks', async () => { + const randomName = `datasetType-${crypto.randomUUID().slice(0, 6)}` + const actual: DatasetType = await addDatasetType.execute({ + name: randomName, + linkedMetadataBlocks: [], + availableLicenses: [] + }) + expect(actual.name).toEqual(randomName) + 
+ const linked: void = await linkDatasetTypeWithMetadataBlocks.execute(actual.id as number, [ + 'geospatial' + ]) + expect(linked).toEqual({ + linkedMetadataBlocks: { + before: [], + after: ['geospatial'] + } + }) + }) + }) + + describe('setAvailableLicensesForDatasetType', () => { + test('should allow for setting available licenses for a dataset type', async () => { + const randomName = `datasetType-${crypto.randomUUID().slice(0, 6)}` + const actual: DatasetType = await addDatasetType.execute({ + name: randomName, + linkedMetadataBlocks: [], + availableLicenses: [] + }) + expect(actual.name).toEqual(randomName) + + const linked: void = await setAvailableLicensesForDatasetType.execute(actual.id as number, [ + 'CC BY 4.0' + ]) + expect(linked).toEqual({ + availableLicenses: { + before: [], + after: ['CC BY 4.0'] + } + }) + }) + }) }) diff --git a/test/integration/externalTools/ExternalToolsRepository.test.ts b/test/integration/externalTools/ExternalToolsRepository.test.ts new file mode 100644 index 00000000..ef2c9248 --- /dev/null +++ b/test/integration/externalTools/ExternalToolsRepository.test.ts @@ -0,0 +1,230 @@ +import { + ApiConfig, + DataverseApiAuthMechanism +} from '../../../src/core/infra/repositories/ApiConfig' +import { TestConstants } from '../../testHelpers/TestConstants' +import { ExternalToolsRepository } from '../../../src/externalTools/infra/ExternalToolsRepository' +import { + deleteExternalToolViaApi, + createExternalToolViaApi, + CREATE_FILE_EXTERNAL_TOOL_PAYLOAD, + CREATE_DATASET_EXTERNAL_TOOL_PAYLOAD +} from '../../testHelpers/externalTools/externalToolsHelper' +import { createDataset, CreatedDatasetIdentifiers, getDatasetFiles, WriteError } from '../../../src' +import { + createCollectionViaApi, + deleteCollectionViaApi +} from '../../testHelpers/collections/collectionHelper' +import { uploadFileViaApi } from '../../testHelpers/files/filesHelper' +import { deleteUnpublishedDatasetViaApi } from '../../testHelpers/datasets/datasetHelper' + 
+describe('ExternalToolsRepository', () => { + const sut: ExternalToolsRepository = new ExternalToolsRepository() + + beforeAll(async () => { + ApiConfig.init( + TestConstants.TEST_API_URL, + DataverseApiAuthMechanism.API_KEY, + process.env.TEST_API_KEY + ) + }) + + describe('getExternalTools', () => { + test('should return all external tools availables in the installation', async () => { + const createdToolResponse = await createExternalToolViaApi('file') + const actual = await sut.getExternalTools() + + expect(actual.length).toBe(1) + expect(actual[0].id).toBe(createdToolResponse.data.data.id) + + await deleteExternalToolViaApi(createdToolResponse.data.data.id) + }) + + test('should return empty array if no external tools are available', async () => { + const actual = await sut.getExternalTools() + + expect(actual.length).toBe(0) + expect(actual).toStrictEqual([]) + }) + }) + + describe('getFileExternalToolResolved', () => { + const testCollectionAlias = 'getFileExternalToolResolvedFunctionalTestCollection' + let testDatasetIds: CreatedDatasetIdentifiers + const testTextFile1Name = 'test-file-1.txt' + let testFileId: number + let testDatasetExternalToolId: number + let testFileExternalToolId: number + + beforeAll(async () => { + try { + // Create a Collection + await createCollectionViaApi(testCollectionAlias) + // Create a Dataset + testDatasetIds = await createDataset.execute( + TestConstants.TEST_NEW_DATASET_DTO, + testCollectionAlias + ) + // Upload a file to the Dataset + await uploadFileViaApi(testDatasetIds.numericId, testTextFile1Name) + // Save File Id + const datasetFiles = await getDatasetFiles.execute(testDatasetIds.numericId) + testFileId = datasetFiles.files[0].id + // Create a dataset-level External Tool + const createdExtToolResponse1 = await createExternalToolViaApi('dataset') + testDatasetExternalToolId = createdExtToolResponse1.data.data.id + // Create a file-level External Tool + const createdExtToolResponse2 = await 
createExternalToolViaApi('file') + testFileExternalToolId = createdExtToolResponse2.data.data.id + } catch (error) { + throw new Error('Tests beforeAll(): Error setting up test data.') + } + }) + + afterAll(async () => { + try { + await deleteUnpublishedDatasetViaApi(testDatasetIds.numericId) + await deleteCollectionViaApi(testCollectionAlias) + await deleteExternalToolViaApi(testDatasetExternalToolId) + await deleteExternalToolViaApi(testFileExternalToolId) + } catch (error) { + throw new Error('Tests afterAll(): Error cleaning up test data.') + } + }) + + test('should return file external tool resolved', async () => { + const fileExternalToolResolved = await sut.getFileExternalToolResolved( + testFileId, + testFileExternalToolId, + { + preview: true, + locale: 'en' + } + ) + expect(fileExternalToolResolved.fileId).toBe(testFileId) + expect(fileExternalToolResolved.displayName).toBe( + CREATE_FILE_EXTERNAL_TOOL_PAYLOAD.displayName + ) + expect(fileExternalToolResolved.toolUrlResolved).toContain( + CREATE_FILE_EXTERNAL_TOOL_PAYLOAD.toolUrl + ) + expect(fileExternalToolResolved.toolUrlResolved).toContain(`preview=true`) + expect(fileExternalToolResolved.preview).toBe(true) + }) + + test('should return error if file external tool id does not exist', async () => { + await expect( + sut.getFileExternalToolResolved(testFileId, 999999, { + preview: true, + locale: 'en' + }) + ).rejects.toThrow(WriteError) // e.g. [400] External tool not found with id: 999999 + }) + + test('should return error if toolId is not for a file-level external tool', async () => { + await expect( + sut.getFileExternalToolResolved(testFileId, testDatasetExternalToolId, { + preview: true, + locale: 'en' + }) + ).rejects.toThrow(WriteError) // e.g. [400] External tool does not have file scope. 
+ }) + + test('should return error if file id does not exist', async () => { + await expect( + sut.getFileExternalToolResolved(56565656, testFileExternalToolId, { + preview: true, + locale: 'en' + }) + ).rejects.toThrow(WriteError) // e.g. [404] File not found for given id: 56565656 + }) + }) + + describe('getDatasetExternalToolResolved', () => { + const testCollectionAlias = 'getDatasetExternalToolResolvedFunctionalTestCollection' + let testDatasetIds: CreatedDatasetIdentifiers + const testTextFile1Name = 'test-file-1.txt' + let testDatasetExternalToolId: number + let testFileExternalToolId: number + + beforeAll(async () => { + try { + // Create a Collection + await createCollectionViaApi(testCollectionAlias) + // Create a Dataset + testDatasetIds = await createDataset.execute( + TestConstants.TEST_NEW_DATASET_DTO, + testCollectionAlias + ) + // Upload a file to the Dataset + await uploadFileViaApi(testDatasetIds.numericId, testTextFile1Name) + // Create a dataset-level External Tool + const createdExtToolResponse1 = await createExternalToolViaApi('dataset') + testDatasetExternalToolId = createdExtToolResponse1.data.data.id + // Create a file-level External Tool + const createdExtToolResponse2 = await createExternalToolViaApi('file') + testFileExternalToolId = createdExtToolResponse2.data.data.id + } catch (error) { + throw new Error('Tests beforeAll(): Error setting up test data.') + } + }) + + afterAll(async () => { + try { + await deleteUnpublishedDatasetViaApi(testDatasetIds.numericId) + await deleteCollectionViaApi(testCollectionAlias) + await deleteExternalToolViaApi(testDatasetExternalToolId) + await deleteExternalToolViaApi(testFileExternalToolId) + } catch (error) { + throw new Error('Tests afterAll(): Error cleaning up test data.') + } + }) + + test('should return dataset external tool resolved', async () => { + const datasetfileExternalToolResolved = await sut.getDatasetExternalToolResolved( + testDatasetIds.numericId, + testDatasetExternalToolId, + { + 
preview: true, + locale: 'en' + } + ) + expect(datasetfileExternalToolResolved.datasetId).toBe(testDatasetIds.numericId) + expect(datasetfileExternalToolResolved.displayName).toBe( + CREATE_DATASET_EXTERNAL_TOOL_PAYLOAD.displayName + ) + expect(datasetfileExternalToolResolved.toolUrlResolved).toContain( + CREATE_DATASET_EXTERNAL_TOOL_PAYLOAD.toolUrl + ) + expect(datasetfileExternalToolResolved.toolUrlResolved).toContain(`preview=true`) + expect(datasetfileExternalToolResolved.preview).toBe(true) + }) + + test('should return error if dataset external tool id does not exist', async () => { + await expect( + sut.getDatasetExternalToolResolved(testDatasetIds.numericId, 999999, { + preview: true, + locale: 'en' + }) + ).rejects.toThrow(WriteError) // e.g. [400] External tool not found with id: 999999 + }) + + test('should return error if toolId is not for a dataset-level external tool', async () => { + await expect( + sut.getDatasetExternalToolResolved(testDatasetIds.numericId, testFileExternalToolId, { + preview: true, + locale: 'en' + }) + ).rejects.toThrow(WriteError) // e.g. [400] External tool does not have dataset scope. + }) + + test('should return error if dataset id does not exist', async () => { + await expect( + sut.getDatasetExternalToolResolved(56565656, testDatasetExternalToolId, { + preview: true, + locale: 'en' + }) + ).rejects.toThrow(WriteError) // e.g. 
[404] Dataset not found for given id: 56565656 + }) + }) +}) diff --git a/test/integration/info/DataverseInfoRepository.test.ts b/test/integration/info/DataverseInfoRepository.test.ts index 4fec4669..41487312 100644 --- a/test/integration/info/DataverseInfoRepository.test.ts +++ b/test/integration/info/DataverseInfoRepository.test.ts @@ -73,4 +73,12 @@ describe('DataverseInfoRepository', () => { await deleteApplicationTermsOfUseViaApi() }) }) + + describe('getAvailableDatasetMetadataExportFormats', () => { + test('should return available dataset metadata export formats', async () => { + const actual = await sut.getAvailableDatasetMetadataExportFormats() + + expect(actual).toBeDefined() + }) + }) }) diff --git a/test/integration/licenses/LicensesRepository.test.ts b/test/integration/licenses/LicensesRepository.test.ts new file mode 100644 index 00000000..2f357ccf --- /dev/null +++ b/test/integration/licenses/LicensesRepository.test.ts @@ -0,0 +1,58 @@ +import { + ApiConfig, + DataverseApiAuthMechanism +} from '../../../src/core/infra/repositories/ApiConfig' +import { TestConstants } from '../../testHelpers/TestConstants' +import { LicensesRepository } from '../../../src/licenses/infra/repositories/LicensesRepository' +import { License } from '../../../src/licenses/domain/models/License' + +describe('LicensesRepository', () => { + const sut: LicensesRepository = new LicensesRepository() + + describe('getAvailableStandardLicenses', () => { + beforeAll(async () => { + ApiConfig.init( + TestConstants.TEST_API_URL, + DataverseApiAuthMechanism.API_KEY, + process.env.TEST_API_KEY + ) + }) + + test('should return list of available standard license terms', async () => { + const actual = await sut.getAvailableStandardLicenses() + + const licenses: License[] = [ + { + id: 1, + name: 'CC0 1.0', + shortDescription: 'Creative Commons CC0 1.0 Universal Public Domain Dedication.', + uri: 'http://creativecommons.org/publicdomain/zero/1.0', + iconUri: 
'https://licensebuttons.net/p/zero/1.0/88x31.png', + active: true, + isDefault: true, + sortOrder: 0, + rightsIdentifier: 'CC0-1.0', + rightsIdentifierScheme: 'SPDX', + schemeUri: 'https://spdx.org/licenses/', + languageCode: 'en' + }, + { + id: 2, + name: 'CC BY 4.0', + shortDescription: 'Creative Commons Attribution 4.0 International License.', + uri: 'http://creativecommons.org/licenses/by/4.0', + iconUri: 'https://licensebuttons.net/l/by/4.0/88x31.png', + active: true, + isDefault: false, + sortOrder: 2, + rightsIdentifier: 'CC-BY-4.0', + rightsIdentifierScheme: 'SPDX', + schemeUri: 'https://spdx.org/licenses/', + languageCode: 'en' + } + ] + + expect(actual).toEqual(licenses) + }) + }) +}) diff --git a/test/integration/notifications/NotificationsRepository.test.ts b/test/integration/notifications/NotificationsRepository.test.ts new file mode 100644 index 00000000..5333e48d --- /dev/null +++ b/test/integration/notifications/NotificationsRepository.test.ts @@ -0,0 +1,186 @@ +import { + ApiConfig, + DataverseApiAuthMechanism +} from '../../../src/core/infra/repositories/ApiConfig' +import { TestConstants } from '../../testHelpers/TestConstants' +import { NotificationsRepository } from '../../../src/notifications/infra/repositories/NotificationsRepository' +import { + Notification, + NotificationType +} from '../../../src/notifications/domain/models/Notification' +import { createDataset, CreatedDatasetIdentifiers } from '../../../src/datasets' +import { publishDatasetViaApi, waitForNoLocks } from '../../testHelpers/datasets/datasetHelper' +import { WriteError } from '../../../src' +import { createCollection } from '../../../src/collections' +import { + createCollectionDTO, + deleteCollectionViaApi +} from '../../testHelpers/collections/collectionHelper' + +describe('NotificationsRepository', () => { + const sut: NotificationsRepository = new NotificationsRepository() + let testDatasetIds: CreatedDatasetIdentifiers + + beforeEach(() => { + ApiConfig.init( + 
TestConstants.TEST_API_URL, + DataverseApiAuthMechanism.API_KEY, + process.env.TEST_API_KEY + ) + }) + + test('should return notifications after creating and publishing a dataset', async () => { + // Create a dataset and publish it so that a notification of Dataset published is created + testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) + + await publishDatasetViaApi(testDatasetIds.numericId) + await waitForNoLocks(testDatasetIds.numericId, 10) + + const notifications: Notification[] = await sut.getAllNotificationsByUser() + + expect(Array.isArray(notifications)).toBe(true) + expect(notifications.length).toBeGreaterThan(0) + + const publishedNotification = notifications.find( + (n) => n.type === NotificationType.PUBLISHEDDS + ) as Notification + + expect(publishedNotification).toBeDefined() + + expect(publishedNotification).toHaveProperty('id') + expect(publishedNotification).toHaveProperty('type') + expect(publishedNotification).toHaveProperty('subjectText') + expect(publishedNotification).toHaveProperty('messageText') + expect(publishedNotification).toHaveProperty('sentTimestamp') + + expect(publishedNotification?.subjectText).toContain( + 'Dataset created using the createDataset use case' + ) + expect(publishedNotification?.messageText).toContain( + 'Your dataset named Dataset created using the createDataset use case' + ) + }) + + test('should delete a notification by ID', async () => { + const notifications: Notification[] = await sut.getAllNotificationsByUser() + + const notificationToDelete = notifications[0] + + await sut.deleteNotification(notificationToDelete.id) + + const notificationsAfterDelete: Notification[] = await sut.getAllNotificationsByUser() + const deletedNotification = notificationsAfterDelete.find( + (n) => n.id === notificationToDelete.id + ) + expect(deletedNotification).toBeUndefined() + }) + + test('should throw error when trying to delete notification with wrong ID', async () => { + const 
nonExistentNotificationId = 99999 + + const expectedError = new WriteError( + `[404] Notification ${nonExistentNotificationId} not found.` + ) + + await expect(sut.deleteNotification(nonExistentNotificationId)).rejects.toThrow(expectedError) + }) + + test('should return notifications with basic properties when inAppNotificationFormat is true', async () => { + const notifications: Notification[] = await sut.getAllNotificationsByUser(true) + + const notification = notifications[0] + expect(notification).toHaveProperty('id') + expect(notification).toHaveProperty('type') + expect(notification).toHaveProperty('sentTimestamp') + expect(notification).toHaveProperty('displayAsRead') + }) + + test('should find notification with ASSIGNROLE type that has not been deleted', async () => { + const notifications: Notification[] = await sut.getAllNotificationsByUser(true) + + const assignRoleNotification = notifications.find( + (n) => n.type === NotificationType.ASSIGNROLE && !n.objectDeleted + ) + + expect(assignRoleNotification).toBeDefined() + expect(assignRoleNotification?.type).toBe(NotificationType.ASSIGNROLE) + expect(assignRoleNotification?.sentTimestamp).toBeDefined() + expect(assignRoleNotification?.displayAsRead).toBeDefined() + expect(assignRoleNotification?.collectionDisplayName).toBeDefined() + + expect(assignRoleNotification?.roleAssignments).toBeDefined() + expect(assignRoleNotification?.roleAssignments?.length).toBeGreaterThan(0) + expect(assignRoleNotification?.roleAssignments?.[0]).toHaveProperty('roleName') + expect(assignRoleNotification?.roleAssignments?.[0]).toHaveProperty('assignee') + expect(assignRoleNotification?.roleAssignments?.[0]).toHaveProperty('roleId') + expect(assignRoleNotification?.roleAssignments?.[0]).toHaveProperty('definitionPointId') + }) + + test('should create a collection and find the notification with CREATEDV type', async () => { + const testCollectionAlias = 'test-notification-collection' + const createdCollectionId = await 
createCollection.execute( + createCollectionDTO(testCollectionAlias) + ) + + expect(createdCollectionId).toBeDefined() + expect(createdCollectionId).toBeGreaterThan(0) + + const notifications: Notification[] = await sut.getAllNotificationsByUser(true) + expect(Array.isArray(notifications)).toBe(true) + expect(notifications.length).toBeGreaterThan(0) + + const createdvNotification = notifications.find( + (n) => n.collectionAlias === testCollectionAlias + ) + + expect(createdvNotification).toBeDefined() + expect(createdvNotification?.type).toBe(NotificationType.CREATEDV) + expect(createdvNotification?.collectionAlias).toBe(testCollectionAlias) + expect(createdvNotification?.sentTimestamp).toBeDefined() + expect(createdvNotification?.displayAsRead).toBe(false) + expect(createdvNotification?.collectionDisplayName).toBe('Test Collection') + expect(createdvNotification?.collectionAlias).toBe(testCollectionAlias) + + await deleteCollectionViaApi(testCollectionAlias) + }) + + test('should return array when inAppNotificationFormat is false', async () => { + const notifications: Notification[] = await sut.getAllNotificationsByUser(false) + + expect(Array.isArray(notifications)).toBe(true) + }) + + test('should return unread count', async () => { + const unreadCount = await sut.getUnreadNotificationsCount() + + expect(typeof unreadCount).toBe('number') + expect(unreadCount).toBeGreaterThanOrEqual(0) + }) + + test('should mark notification as read successfully', async () => { + const notifications: Notification[] = await sut.getAllNotificationsByUser() + + expect(notifications.length).toBeGreaterThan(0) + + const unreadNotification = notifications[0] + + await expect(sut.markNotificationAsRead(unreadNotification.id)).resolves.toBeUndefined() + + const updatedNotifications: Notification[] = await sut.getAllNotificationsByUser() + const updatedNotification = updatedNotifications.find((n) => n.id === unreadNotification.id) + + expect(updatedNotification?.displayAsRead).toBe(true) 
+ }) + + test('should throw error when marking non-existent notification as read', async () => { + const nonExistentNotificationId = 99999 + + const expectedError = new WriteError( + `[404] Notification ${nonExistentNotificationId} not found.` + ) + + await expect(sut.markNotificationAsRead(nonExistentNotificationId)).rejects.toThrow( + expectedError + ) + }) +}) diff --git a/test/integration/search/SearchServicesRepository.test.ts b/test/integration/search/SearchServicesRepository.test.ts new file mode 100644 index 00000000..198eb0b3 --- /dev/null +++ b/test/integration/search/SearchServicesRepository.test.ts @@ -0,0 +1,25 @@ +import { ApiConfig } from '../../../src' +import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig' +import { SearchServicesRepository } from '../../../src/search/infra/repositories/SearchServicesRepository' +import { TestConstants } from '../../testHelpers/TestConstants' + +describe('SearchServicesRepository', () => { + const sut: SearchServicesRepository = new SearchServicesRepository() + + beforeAll(async () => { + ApiConfig.init( + TestConstants.TEST_API_URL, + DataverseApiAuthMechanism.API_KEY, + process.env.TEST_API_KEY + ) + }) + + describe('getSearchServices', () => { + test('should return search services', async () => { + const actual = await sut.getSearchServices() + expect(actual.length).toEqual(1) + expect(actual[0].name).toEqual('solr') + expect(actual[0].displayName).toEqual('Dataverse Standard Search') + }) + }) +}) diff --git a/test/testHelpers/datasets/datasetHelper.ts b/test/testHelpers/datasets/datasetHelper.ts index bed1fc69..4cb1ee79 100644 --- a/test/testHelpers/datasets/datasetHelper.ts +++ b/test/testHelpers/datasets/datasetHelper.ts @@ -514,6 +514,7 @@ export const createDatasetMetadataBlockModel = (): MetadataBlock => { isControlledVocabulary: false, displayFormat: '#VALUE', isRequired: true, + isAdvancedSearchFieldType: true, displayOrder: 0, displayOnCreate: true, typeClass: 
MetadataFieldTypeClass.Primitive @@ -529,6 +530,7 @@ export const createDatasetMetadataBlockModel = (): MetadataBlock => { isControlledVocabulary: false, displayFormat: '#VALUE', isRequired: true, + isAdvancedSearchFieldType: false, displayOrder: 1, typeClass: MetadataFieldTypeClass.Compound, displayOnCreate: true, @@ -544,6 +546,7 @@ export const createDatasetMetadataBlockModel = (): MetadataBlock => { isControlledVocabulary: false, displayFormat: '#VALUE', isRequired: true, + isAdvancedSearchFieldType: true, displayOrder: 2, displayOnCreate: true, typeClass: MetadataFieldTypeClass.Primitive @@ -559,6 +562,7 @@ export const createDatasetMetadataBlockModel = (): MetadataBlock => { isControlledVocabulary: false, displayFormat: '#VALUE', isRequired: false, + isAdvancedSearchFieldType: true, displayOrder: 3, displayOnCreate: true, typeClass: MetadataFieldTypeClass.Primitive @@ -577,6 +581,7 @@ export const createDatasetMetadataBlockModel = (): MetadataBlock => { isControlledVocabulary: false, displayFormat: '', isRequired: true, + isAdvancedSearchFieldType: false, displayOrder: 4, displayOnCreate: true, typeClass: MetadataFieldTypeClass.Primitive @@ -592,6 +597,7 @@ export const createDatasetMetadataBlockModel = (): MetadataBlock => { isControlledVocabulary: false, displayFormat: '#NAME: #VALUE ', isRequired: false, + isAdvancedSearchFieldType: false, displayOrder: 5, displayOnCreate: true, typeClass: MetadataFieldTypeClass.Primitive @@ -607,6 +613,7 @@ export const createDatasetMetadataBlockModel = (): MetadataBlock => { isControlledVocabulary: false, displayFormat: '#NAME: #VALUE ', isRequired: false, + isAdvancedSearchFieldType: false, displayOrder: 5, displayOnCreate: true, typeClass: MetadataFieldTypeClass.Primitive @@ -623,6 +630,7 @@ export const createDatasetMetadataBlockModel = (): MetadataBlock => { isControlledVocabulary: false, displayFormat: ':', isRequired: false, + isAdvancedSearchFieldType: false, displayOrder: 6, typeClass: 
MetadataFieldTypeClass.Compound, displayOnCreate: true, @@ -638,6 +646,7 @@ export const createDatasetMetadataBlockModel = (): MetadataBlock => { isControlledVocabulary: true, displayFormat: '#VALUE ', isRequired: false, + isAdvancedSearchFieldType: false, displayOrder: 7, displayOnCreate: true, controlledVocabularyValues: [ @@ -673,6 +682,7 @@ export const createDatasetMetadataBlockModel = (): MetadataBlock => { isControlledVocabulary: false, displayFormat: '#VALUE', isRequired: true, + isAdvancedSearchFieldType: false, displayOrder: 8, typeClass: MetadataFieldTypeClass.Primitive, displayOnCreate: true @@ -684,9 +694,11 @@ export const createDatasetMetadataBlockModel = (): MetadataBlock => { } export const createNewDatasetRequestPayload = ( - license?: DatasetLicense + license?: DatasetLicense, + datasetType?: string ): NewDatasetRequestPayload => { return { + datasetType, datasetVersion: { ...(license && { license }), metadataBlocks: { diff --git a/test/testHelpers/datasets/datasetTemplatesHelper.ts b/test/testHelpers/datasets/datasetTemplatesHelper.ts new file mode 100644 index 00000000..1cc87300 --- /dev/null +++ b/test/testHelpers/datasets/datasetTemplatesHelper.ts @@ -0,0 +1,67 @@ +import axios from 'axios' +import { TestConstants } from '../TestConstants' +import { DatasetTemplatePayload } from '../../../src/datasets/infra/repositories/transformers/DatasetTemplatePayload' + +const DATASET_TEMPLATE_DTO = { + name: 'Dataset Template', + isDefault: true, + fields: [ + { + typeName: 'author', + value: [ + { + authorName: { + typeName: 'authorName', + value: 'Belicheck, Bill' + }, + authorAffiliation: { + typeName: 'authorIdentifierScheme', + value: 'ORCID' + } + } + ] + } + ], + instructions: [ + { + instructionField: 'author', + instructionText: 'The author data' + } + ] +} + +const DATAVERSE_API_REQUEST_HEADERS = { + headers: { 'Content-Type': 'application/json', 'X-Dataverse-Key': process.env.TEST_API_KEY } +} + +export async function 
createDatasetTemplateViaApi( + collectionAlias: string +): Promise { + try { + if (collectionAlias == undefined) { + collectionAlias = ':root' + } + return await axios + .post( + `${TestConstants.TEST_API_URL}/dataverses/${collectionAlias}/templates`, + JSON.stringify(DATASET_TEMPLATE_DTO), + DATAVERSE_API_REQUEST_HEADERS + ) + .then((response) => response.data.data) + } catch (error) { + throw new Error(`Error while creating dataset template in collection ${collectionAlias}`) + } +} + +export async function deleteDatasetTemplateViaApi(templateId: number): Promise { + try { + return await axios + .delete( + `${TestConstants.TEST_API_URL}/admin/template/${templateId}`, + DATAVERSE_API_REQUEST_HEADERS + ) + .then((response) => response.data.data) + } catch (error) { + throw new Error(`Error while deleting dataset template with id ${templateId}`) + } +} diff --git a/test/testHelpers/externalTools/externalToolsHelper.ts b/test/testHelpers/externalTools/externalToolsHelper.ts new file mode 100644 index 00000000..0e280d6f --- /dev/null +++ b/test/testHelpers/externalTools/externalToolsHelper.ts @@ -0,0 +1,136 @@ +import axios, { AxiosResponse } from 'axios' +import { + DatasetExternalToolResolved, + ExternalTool, + FileExternalToolResolved, + ToolScope, + ToolType +} from '../../../src' +import { TestConstants } from '../TestConstants' +import { ExternalToolPayload } from '../../../src/externalTools/infra/transformers/ExternalToolPayload' + +const DATAVERSE_API_REQUEST_HEADERS = { + headers: { 'Content-Type': 'application/json', 'X-Dataverse-Key': process.env.TEST_API_KEY } +} + +export const CREATE_FILE_EXTERNAL_TOOL_PAYLOAD: ISetExternalToolViaApi = { + displayName: 'Text File Tool', + toolName: 'textFileTool', + description: 'Text File Tool', + types: [ToolType.Preview], + scope: ToolScope.File, + toolUrl: 'http://example.org/text-tool', + toolParameters: { + queryParameters: [ + { fileid: '{fileId}' }, + { siteUrl: '{siteUrl}' }, + { datasetid: '{datasetId}' }, + { 
datasetversion: '{datasetVersion}' }, + { locale: '{localeCode}' } + ] + }, + contentType: 'text/plain', + allowedApiCalls: [ + { + name: 'retrieveFileContents', + httpMethod: 'GET', + urlTemplate: '/api/v1/access/datafile/{fileId}', + timeOut: 3600 + } + ] +} + +export const CREATE_DATASET_EXTERNAL_TOOL_PAYLOAD: ISetExternalToolViaApi = { + displayName: 'Dataset Tool', + toolName: 'datasetFileTool', + description: 'Dataset Explore Tool', + types: [ToolType.Explore], + scope: ToolScope.Dataset, + toolUrl: 'http://example.org/dataset-tool', + toolParameters: { + queryParameters: [{ datasetPid: '{datasetPid}' }] + } +} + +export const createExternalToolsModel = (): ExternalTool[] => { + return [ + { + id: 1, + displayName: 'Test External Tool 1', + description: 'Description for Test External Tool 1', + scope: ToolScope.Dataset, + types: [ToolType.Explore] + }, + { + id: 2, + displayName: 'Test External Tool 2', + description: 'Description for Test External Tool 2', + scope: ToolScope.File, + types: [ToolType.Preview] + } + ] +} + +export const createFileExternalToolResolvedModel = (): FileExternalToolResolved => { + return { + toolUrlResolved: 'https://example.com/text-tool?fileId=123', + displayName: 'Test File External Tool', + fileId: 123, + preview: true + } +} + +export const createDatasetExternalToolResolvedModel = (): DatasetExternalToolResolved => { + return { + toolUrlResolved: 'https://example.com/dataset-tool?datasetId=456', + displayName: 'Test Dataset External Tool', + datasetId: 456, + preview: false + } +} + +interface ISetExternalToolViaApi { + displayName: string + toolName: string + description: string + types: ToolType[] + scope: ToolScope + toolUrl: string + toolParameters: { + queryParameters: { [key: string]: string }[] + } + contentType?: string + allowedApiCalls?: { + name: string + httpMethod: 'GET' | 'POST' | 'PUT' | 'DELETE' + urlTemplate: string + timeOut: number + }[] +} + +export async function createExternalToolViaApi( + type: 'dataset' 
| 'file' +): Promise> { + try { + return await axios.post( + `${TestConstants.TEST_API_URL}/admin/externalTools`, + type === 'dataset' ? CREATE_DATASET_EXTERNAL_TOOL_PAYLOAD : CREATE_FILE_EXTERNAL_TOOL_PAYLOAD, + DATAVERSE_API_REQUEST_HEADERS + ) + } catch (error) { + console.log(error) + throw new Error('Error while setting external tool via API.') + } +} + +export async function deleteExternalToolViaApi(toolId: number): Promise { + try { + await axios.delete( + `${TestConstants.TEST_API_URL}/externalTools/${toolId}`, + DATAVERSE_API_REQUEST_HEADERS + ) + } catch (error) { + console.log(error) + throw new Error('Error while deleting external tool via API.') + } +} diff --git a/test/testHelpers/metadataBlocks/metadataBlockHelper.ts b/test/testHelpers/metadataBlocks/metadataBlockHelper.ts index dd041c4c..2d5418e4 100644 --- a/test/testHelpers/metadataBlocks/metadataBlockHelper.ts +++ b/test/testHelpers/metadataBlocks/metadataBlockHelper.ts @@ -26,6 +26,7 @@ export const createMetadataBlockModel = (): MetadataBlock => { isControlledVocabulary: false, displayFormat: '#VALUE', isRequired: true, + isAdvancedSearchFieldType: false, displayOrder: 0, typeClass: MetadataFieldTypeClass.Primitive, displayOnCreate: true @@ -41,6 +42,7 @@ export const createMetadataBlockModel = (): MetadataBlock => { isControlledVocabulary: false, displayFormat: '', isRequired: true, + isAdvancedSearchFieldType: false, displayOrder: 0, typeClass: MetadataFieldTypeClass.Compound, displayOnCreate: true, @@ -56,6 +58,7 @@ export const createMetadataBlockModel = (): MetadataBlock => { isControlledVocabulary: false, displayFormat: '#VALUE', isRequired: true, + isAdvancedSearchFieldType: false, displayOrder: 0, typeClass: MetadataFieldTypeClass.Primitive, displayOnCreate: true @@ -71,6 +74,7 @@ export const createMetadataBlockModel = (): MetadataBlock => { isControlledVocabulary: false, displayFormat: '#VALUE', isRequired: true, + isAdvancedSearchFieldType: false, displayOrder: 0, typeClass: 
MetadataFieldTypeClass.Primitive, displayOnCreate: true @@ -99,6 +103,7 @@ export const createMetadataBlockPayload = (): MetadataBlockPayload => { isControlledVocabulary: false, displayFormat: '#VALUE', isRequired: true, + isAdvancedSearchFieldType: false, displayOrder: 0, typeClass: 'primitive', displayOnCreate: true @@ -114,6 +119,7 @@ export const createMetadataBlockPayload = (): MetadataBlockPayload => { isControlledVocabulary: false, displayFormat: '', isRequired: true, + isAdvancedSearchFieldType: false, displayOrder: 0, typeClass: 'compound', displayOnCreate: true, @@ -129,6 +135,7 @@ export const createMetadataBlockPayload = (): MetadataBlockPayload => { isControlledVocabulary: false, displayFormat: '#VALUE', isRequired: true, + isAdvancedSearchFieldType: false, displayOrder: 0, typeClass: 'primitive', displayOnCreate: true @@ -144,6 +151,7 @@ export const createMetadataBlockPayload = (): MetadataBlockPayload => { isControlledVocabulary: false, displayFormat: '#VALUE', isRequired: true, + isAdvancedSearchFieldType: false, displayOrder: 0, typeClass: 'primitive', displayOnCreate: true @@ -166,6 +174,7 @@ export const createMetadataFieldInfoModel = (): MetadataFieldInfo => { isControlledVocabulary: false, displayFormat: '#VALUE', isRequired: true, + isAdvancedSearchFieldType: false, displayOrder: 0, typeClass: MetadataFieldTypeClass.Primitive, displayOnCreate: true @@ -184,6 +193,7 @@ export const createMetadataFieldInfoPayload = (): MetadataFieldInfoPayload => { isControlledVocabulary: false, displayFormat: '#VALUE', isRequired: true, + isAdvancedSearchFieldType: false, displayOrder: 0, typeClass: 'primitive', displayOnCreate: true diff --git a/test/testHelpers/roles/roleHelper.ts b/test/testHelpers/roles/roleHelper.ts index cf48cc3c..d792b377 100644 --- a/test/testHelpers/roles/roleHelper.ts +++ b/test/testHelpers/roles/roleHelper.ts @@ -43,6 +43,8 @@ export const createSuperAdminRoleArray = (): Role[] => { 'ManageFilePermissions', 'PublishDataverse', 
'PublishDataset', + 'LinkDataverse', + 'LinkDataset', 'DeleteDataverse', 'DeleteDatasetDraft' ], @@ -99,10 +101,11 @@ export const createSuperAdminRoleArray = (): Role[] => { 'ManageDatasetPermissions', 'ManageFilePermissions', 'PublishDataset', + 'LinkDataset', 'DeleteDatasetDraft' ], description: - 'For datasets, a person who can edit License + Terms, edit Permissions, and publish datasets.', + 'For datasets, a person who can edit License + Terms, edit Permissions, and publish and link datasets.', id: 7 }, { diff --git a/test/testHelpers/search/searchServiceHelper.ts b/test/testHelpers/search/searchServiceHelper.ts new file mode 100644 index 00000000..afa55193 --- /dev/null +++ b/test/testHelpers/search/searchServiceHelper.ts @@ -0,0 +1,8 @@ +import { SearchService } from '../../../src/search/domain/models/SearchService' + +export const createSearchServiceModelArray = (count: number): SearchService[] => { + return Array.from({ length: count }, (_, index) => ({ + name: `role${index + 1}`, + displayName: `Role ${index + 1}` + })) +} diff --git a/test/unit/collections/GetCollectionItems.test.ts b/test/unit/collections/GetCollectionItems.test.ts index 33c8ecbc..79159da7 100644 --- a/test/unit/collections/GetCollectionItems.test.ts +++ b/test/unit/collections/GetCollectionItems.test.ts @@ -55,6 +55,7 @@ describe('execute', () => { undefined, undefined, undefined, + undefined, false ) expect(actual).toEqual(testItemSubset) @@ -71,6 +72,7 @@ describe('execute', () => { limit, undefined, undefined, + undefined, false ) expect(actual).toEqual(testItemSubset) @@ -87,6 +89,7 @@ describe('execute', () => { undefined, offset, undefined, + undefined, false ) expect(actual).toEqual(testItemSubset) @@ -103,7 +106,7 @@ describe('execute', () => { undefined, undefined, searchCriteria, - false + undefined ) expect(collectionRepositoryStub.getCollectionItems).toHaveBeenCalledWith( @@ -111,6 +114,7 @@ describe('execute', () => { undefined, undefined, searchCriteria, + undefined, 
false ) expect(actual).toEqual(testItemSubset) @@ -136,6 +140,7 @@ describe('execute', () => { undefined, undefined, undefined, + undefined, showTypeCounts ) @@ -144,6 +149,7 @@ describe('execute', () => { undefined, undefined, undefined, + undefined, showTypeCounts ) expect(actual).toEqual(testItemSubsetWithCount) @@ -164,6 +170,7 @@ describe('execute', () => { limit, offset, searchCriteria, + undefined, false ) @@ -172,6 +179,7 @@ describe('execute', () => { limit, offset, searchCriteria, + undefined, false ) expect(actual).toEqual(testItemSubset) diff --git a/test/unit/collections/LinkCollection.test.ts b/test/unit/collections/LinkCollection.test.ts new file mode 100644 index 00000000..7555caa2 --- /dev/null +++ b/test/unit/collections/LinkCollection.test.ts @@ -0,0 +1,25 @@ +import { ICollectionsRepository } from '../../../src/collections/domain/repositories/ICollectionsRepository' +import { WriteError } from '../../../src' +import { LinkCollection } from '../../../src/collections/domain/useCases/LinkCollection' + +describe('execute', () => { + test('should link collection successfully on repository success', async () => { + const collectionRepositoryStub: ICollectionsRepository = {} as ICollectionsRepository + collectionRepositoryStub.linkCollection = jest.fn().mockResolvedValue(undefined) + + const testLinkCollection = new LinkCollection(collectionRepositoryStub) + + await expect(testLinkCollection.execute(1, 2)).resolves.toBeUndefined() + expect(collectionRepositoryStub.linkCollection).toHaveBeenCalledWith(1, 2) + }) + + test('should throw error on repository failure', async () => { + const collectionRepositoryStub: ICollectionsRepository = {} as ICollectionsRepository + collectionRepositoryStub.linkCollection = jest.fn().mockRejectedValue(new WriteError()) + + const testLinkCollection = new LinkCollection(collectionRepositoryStub) + + await expect(testLinkCollection.execute(1, 2)).rejects.toThrow(WriteError) + 
expect(collectionRepositoryStub.linkCollection).toHaveBeenCalledWith(1, 2) + }) +}) diff --git a/test/unit/collections/UnlinkCollection.test.ts b/test/unit/collections/UnlinkCollection.test.ts new file mode 100644 index 00000000..907aa769 --- /dev/null +++ b/test/unit/collections/UnlinkCollection.test.ts @@ -0,0 +1,25 @@ +import { ICollectionsRepository } from '../../../src/collections/domain/repositories/ICollectionsRepository' +import { WriteError } from '../../../src' +import { UnlinkCollection } from '../../../src/collections/domain/useCases/UnlinkCollection' + +describe('execute', () => { + test('should unlink collection successfully on repository success', async () => { + const collectionRepositoryStub: ICollectionsRepository = {} as ICollectionsRepository + collectionRepositoryStub.unlinkCollection = jest.fn().mockResolvedValue(undefined) + + const testUnlinkCollection = new UnlinkCollection(collectionRepositoryStub) + + await expect(testUnlinkCollection.execute(1, 2)).resolves.toBeUndefined() + expect(collectionRepositoryStub.unlinkCollection).toHaveBeenCalledWith(1, 2) + }) + + test('should throw error on repository failure', async () => { + const collectionRepositoryStub: ICollectionsRepository = {} as ICollectionsRepository + collectionRepositoryStub.unlinkCollection = jest.fn().mockRejectedValue(new WriteError()) + + const testUnlinkCollection = new UnlinkCollection(collectionRepositoryStub) + + await expect(testUnlinkCollection.execute(1, 2)).rejects.toThrow(WriteError) + expect(collectionRepositoryStub.unlinkCollection).toHaveBeenCalledWith(1, 2) + }) +}) diff --git a/test/unit/datasets/CreateDataset.test.ts b/test/unit/datasets/CreateDataset.test.ts index 92c73d78..edc69abb 100644 --- a/test/unit/datasets/CreateDataset.test.ts +++ b/test/unit/datasets/CreateDataset.test.ts @@ -51,7 +51,49 @@ describe('execute', () => { expect(datasetsRepositoryStub.createDataset).toHaveBeenCalledWith( testDataset, testMetadataBlocks, - ROOT_COLLECTION_ID + 
ROOT_COLLECTION_ID, + undefined + ) + }) + + test('should return a dataset type', async () => { + const testCreatedDatasetIdentifiers: CreatedDatasetIdentifiers = { + persistentId: 'test', + numericId: 1 + } + + const datasetsRepositoryStub = {} + datasetsRepositoryStub.createDataset = jest + .fn() + .mockResolvedValue(testCreatedDatasetIdentifiers) + + const datasetValidatorStub = {} + datasetValidatorStub.validate = jest.fn().mockResolvedValue(undefined) + + const metadataBlocksRepositoryStub = {} + metadataBlocksRepositoryStub.getMetadataBlockByName = jest + .fn() + .mockResolvedValue(testMetadataBlocks[0]) + + const sut = new CreateDataset( + datasetsRepositoryStub, + metadataBlocksRepositoryStub, + datasetValidatorStub + ) + + const actual = await sut.execute(testDataset, ROOT_COLLECTION_ID, 'software') + + expect(actual).toEqual(testCreatedDatasetIdentifiers) + + expect(metadataBlocksRepositoryStub.getMetadataBlockByName).toHaveBeenCalledWith( + testMetadataBlocks[0].name + ) + expect(datasetValidatorStub.validate).toHaveBeenCalledWith(testDataset, testMetadataBlocks) + expect(datasetsRepositoryStub.createDataset).toHaveBeenCalledWith( + testDataset, + testMetadataBlocks, + ROOT_COLLECTION_ID, + 'software' ) }) @@ -111,7 +153,8 @@ describe('execute', () => { expect(datasetsRepositoryStub.createDataset).toHaveBeenCalledWith( testDataset, testMetadataBlocks, - ROOT_COLLECTION_ID + ROOT_COLLECTION_ID, + undefined ) }) diff --git a/test/unit/datasets/DeleteDatasetType.test.ts b/test/unit/datasets/DeleteDatasetType.test.ts new file mode 100644 index 00000000..b8104a7a --- /dev/null +++ b/test/unit/datasets/DeleteDatasetType.test.ts @@ -0,0 +1,23 @@ +import { DeleteDatasetType } from '../../../src/datasets/domain/useCases/DeleteDatasetType' +import { IDatasetsRepository } from '../../../src/datasets/domain/repositories/IDatasetsRepository' +import { WriteError } from '../../../src' + +describe('execute', () => { + test('should return undefined on delete success', 
async () => { + const datasetsRepositoryStub: IDatasetsRepository = {} as IDatasetsRepository + datasetsRepositoryStub.deleteDatasetType = jest.fn().mockResolvedValue(undefined) + const sut = new DeleteDatasetType(datasetsRepositoryStub) + + const actual = await sut.execute(1) + expect(actual).toEqual(undefined) + }) + + test('should return error result on delete error', async () => { + const datasetsRepositoryStub: IDatasetsRepository = {} as IDatasetsRepository + datasetsRepositoryStub.deleteDatasetType = jest.fn().mockRejectedValue(new WriteError()) + const sut = new DeleteDatasetType(datasetsRepositoryStub) + + const nonExistentDatasetTypeId = 111 + await expect(sut.execute(nonExistentDatasetTypeId)).rejects.toThrow(WriteError) + }) +}) diff --git a/test/unit/datasets/GetDatasetAvailableDatasetType.test.ts b/test/unit/datasets/GetDatasetAvailableDatasetType.test.ts new file mode 100644 index 00000000..d6b4d958 --- /dev/null +++ b/test/unit/datasets/GetDatasetAvailableDatasetType.test.ts @@ -0,0 +1,54 @@ +import { GetDatasetAvailableDatasetType } from '../../../src/datasets/domain/useCases/GetDatasetAvailableDatasetType' +import { IDatasetsRepository } from '../../../src/datasets/domain/repositories/IDatasetsRepository' +import { DatasetType } from '../../../src/datasets/domain/models/DatasetType' +import { ReadError } from '../../../src' + +describe('GetDatasetAvailableDatasetType', () => { + const datasetTypesRepositoryStub: IDatasetsRepository = {} as IDatasetsRepository + + const datasetTypeId = 1 + const datasetTypeName = 'dataset' + const expectedDatasetType: DatasetType = { + id: datasetTypeId, + name: datasetTypeName, + linkedMetadataBlocks: [], + availableLicenses: [] + } + + it('should get a dataset type by database id', async () => { + datasetTypesRepositoryStub.getDatasetAvailableDatasetType = jest + .fn() + .mockResolvedValue(expectedDatasetType) + const sut = new GetDatasetAvailableDatasetType(datasetTypesRepositoryStub) + + const actual = await 
sut.execute(datasetTypeId) + + expect(actual).toEqual(expectedDatasetType) + expect(datasetTypesRepositoryStub.getDatasetAvailableDatasetType).toHaveBeenCalledTimes(1) + }) + + it('should get a dataset type by name', async () => { + datasetTypesRepositoryStub.getDatasetAvailableDatasetType = jest + .fn() + .mockResolvedValue(expectedDatasetType) + const sut = new GetDatasetAvailableDatasetType(datasetTypesRepositoryStub) + + const actual = await sut.execute(datasetTypeName) + + expect(actual).toEqual(expectedDatasetType) + expect(datasetTypesRepositoryStub.getDatasetAvailableDatasetType).toHaveBeenCalledTimes(1) + }) + + test('should return error result on repository error', async () => { + const datasetsRepositoryStub: IDatasetsRepository = {} as IDatasetsRepository + const datasetTypeId = 1 + const expectedError = new ReadError('Failed to fetch dataset type') + datasetsRepositoryStub.getDatasetAvailableDatasetType = jest + .fn() + .mockRejectedValue(expectedError) + const sut = new GetDatasetAvailableDatasetType(datasetsRepositoryStub) + + await expect(sut.execute(datasetTypeId)).rejects.toThrow(ReadError) + expect(datasetsRepositoryStub.getDatasetAvailableDatasetType).toHaveBeenCalledTimes(1) + }) +}) diff --git a/test/unit/datasets/GetDatasetAvailableDatasetTypes.test.ts b/test/unit/datasets/GetDatasetAvailableDatasetTypes.test.ts new file mode 100644 index 00000000..b8768f92 --- /dev/null +++ b/test/unit/datasets/GetDatasetAvailableDatasetTypes.test.ts @@ -0,0 +1,49 @@ +import { ReadError } from '../../../src' +import { DatasetType } from '../../../src' +import { IDatasetsRepository } from '../../../src/datasets/domain/repositories/IDatasetsRepository' +import { GetDatasetAvailableDatasetTypes } from '../../../src/datasets/domain/useCases/GetDatasetAvailableDatasetTypes' + +describe('GetDatasetAvailableDatasetTypes', () => { + describe('execute', () => { + test('should return datasetTypes array on repository success', async () => { + const 
datasetTypesRepositoryStub: IDatasetsRepository = {} as IDatasetsRepository + + const testDatasetTypes: DatasetType[] = [ + { + id: 1, + name: 'dataset', + linkedMetadataBlocks: [], + availableLicenses: [] + }, + { + id: 2, + name: 'software', + linkedMetadataBlocks: ['codeMeta20'], + availableLicenses: ['MIT', 'Apache-2.0'] + } + ] + + datasetTypesRepositoryStub.getDatasetAvailableDatasetTypes = jest + .fn() + .mockResolvedValue(testDatasetTypes) + const sut = new GetDatasetAvailableDatasetTypes(datasetTypesRepositoryStub) + + const actual = await sut.execute() + + expect(actual).toEqual(testDatasetTypes) + expect(datasetTypesRepositoryStub.getDatasetAvailableDatasetTypes).toHaveBeenCalledTimes(1) + }) + + test('should return error result on repository error', async () => { + const datasetsRepositoryStub: IDatasetsRepository = {} as IDatasetsRepository + const expectedError = new ReadError('Failed to fetch dataset types') + datasetsRepositoryStub.getDatasetAvailableDatasetTypes = jest + .fn() + .mockRejectedValue(expectedError) + const sut = new GetDatasetAvailableDatasetTypes(datasetsRepositoryStub) + + await expect(sut.execute()).rejects.toThrow(ReadError) + expect(datasetsRepositoryStub.getDatasetAvailableDatasetTypes).toHaveBeenCalledTimes(1) + }) + }) +}) diff --git a/test/unit/datasets/GetDatasetCitationInOtherFormats.test.ts b/test/unit/datasets/GetDatasetCitationInOtherFormats.test.ts new file mode 100644 index 00000000..60f462f6 --- /dev/null +++ b/test/unit/datasets/GetDatasetCitationInOtherFormats.test.ts @@ -0,0 +1,38 @@ +import { GetDatasetCitationInOtherFormats } from '../../../src/datasets/domain/useCases/GetDatasetCitationInOtherFormats' +import { IDatasetsRepository } from '../../../src/datasets/domain/repositories/IDatasetsRepository' +import { ReadError } from '../../../src/core/domain/repositories/ReadError' +import { CitationFormat } from '../../../src/datasets/domain/models/CitationFormat' +import { DatasetNotNumberedVersion } from 
'../../../src/datasets/domain/models/DatasetNotNumberedVersion' +import { FormattedCitation } from '../../../src/datasets/domain/models/FormattedCitation' + +describe('GetDatasetCitationInOtherFormats.execute', () => { + const testDatasetId = 1 + const testFormat: CitationFormat = CitationFormat.BibTeX + const testVersion: DatasetNotNumberedVersion = DatasetNotNumberedVersion.LATEST + + test('should return citation response on repository success', async () => { + const expectedCitation: FormattedCitation = { + content: '@data{example, ...}', + contentType: 'text/plain' + } + + const datasetsRepositoryStub: IDatasetsRepository = { + getDatasetCitationInOtherFormats: jest.fn().mockResolvedValue(expectedCitation) + } as unknown as IDatasetsRepository + + const sut = new GetDatasetCitationInOtherFormats(datasetsRepositoryStub) + + const actual = await sut.execute(testDatasetId, testVersion, testFormat as CitationFormat) + expect(actual).toEqual(expectedCitation) + }) + + test('should throw ReadError on repository failure', async () => { + const datasetsRepositoryStub: IDatasetsRepository = { + getDatasetCitationInOtherFormats: jest.fn().mockRejectedValue(new ReadError()) + } as unknown as IDatasetsRepository + + const sut = new GetDatasetCitationInOtherFormats(datasetsRepositoryStub) + + await expect(sut.execute(testDatasetId, testVersion, testFormat)).rejects.toThrow(ReadError) + }) +}) diff --git a/test/unit/datasets/LinkDatasetTypeWithMetadataBlocks.test.ts b/test/unit/datasets/LinkDatasetTypeWithMetadataBlocks.test.ts new file mode 100644 index 00000000..c284e0c1 --- /dev/null +++ b/test/unit/datasets/LinkDatasetTypeWithMetadataBlocks.test.ts @@ -0,0 +1,27 @@ +import { LinkDatasetTypeWithMetadataBlocks } from '../../../src/datasets/domain/useCases/LinkDatasetTypeWithMetadataBlocks' +import { IDatasetsRepository } from '../../../src/datasets/domain/repositories/IDatasetsRepository' +import { WriteError } from '../../../src' + +describe('execute', () => { + 
test('should return undefined on link dataset type with metadata block success', async () => { + const datasetsRepositoryStub: IDatasetsRepository = {} as IDatasetsRepository + datasetsRepositoryStub.linkDatasetTypeWithMetadataBlocks = jest + .fn() + .mockResolvedValue(undefined) + const sut = new LinkDatasetTypeWithMetadataBlocks(datasetsRepositoryStub) + + const actual = await sut.execute(1, ['geospatial']) + expect(actual).toEqual(undefined) + }) + + test('should return error result on link dataset type with metadata block error', async () => { + const datasetsRepositoryStub: IDatasetsRepository = {} as IDatasetsRepository + datasetsRepositoryStub.linkDatasetTypeWithMetadataBlocks = jest + .fn() + .mockRejectedValue(new WriteError()) + const sut = new LinkDatasetTypeWithMetadataBlocks(datasetsRepositoryStub) + + const nonExistentDatasetTypeId = 111 + await expect(sut.execute(nonExistentDatasetTypeId, ['geospatial'])).rejects.toThrow(WriteError) + }) +}) diff --git a/test/unit/datasets/SetAvailableLicensesForDatasetType.test.ts b/test/unit/datasets/SetAvailableLicensesForDatasetType.test.ts new file mode 100644 index 00000000..965ebcef --- /dev/null +++ b/test/unit/datasets/SetAvailableLicensesForDatasetType.test.ts @@ -0,0 +1,27 @@ +import { SetAvailableLicensesForDatasetType } from '../../../src/datasets/domain/useCases/SetAvailableLicensesForDatasetType' +import { IDatasetsRepository } from '../../../src/datasets/domain/repositories/IDatasetsRepository' +import { WriteError } from '../../../src' + +describe('execute', () => { + test('should return undefined on set available licenses for dataset type success', async () => { + const datasetsRepositoryStub: IDatasetsRepository = {} as IDatasetsRepository + datasetsRepositoryStub.setAvailableLicensesForDatasetType = jest + .fn() + .mockResolvedValue(undefined) + const sut = new SetAvailableLicensesForDatasetType(datasetsRepositoryStub) + + const actual = await sut.execute(1, ['geospatial']) + 
expect(actual).toEqual(undefined) + }) + + test('should return error result on set available licenses for dataset type error', async () => { + const datasetsRepositoryStub: IDatasetsRepository = {} as IDatasetsRepository + datasetsRepositoryStub.setAvailableLicensesForDatasetType = jest + .fn() + .mockRejectedValue(new WriteError()) + const sut = new SetAvailableLicensesForDatasetType(datasetsRepositoryStub) + + const nonExistentDatasetTypeId = 111 + await expect(sut.execute(nonExistentDatasetTypeId, ['geospatial'])).rejects.toThrow(WriteError) + }) +}) diff --git a/test/unit/datasets/datasetTransformers.test.ts b/test/unit/datasets/datasetTransformers.test.ts index e659f533..7e4185c7 100644 --- a/test/unit/datasets/datasetTransformers.test.ts +++ b/test/unit/datasets/datasetTransformers.test.ts @@ -33,4 +33,28 @@ describe('transformNewDatasetModelToRequestPayload', () => { expect(actual).toEqual(expectedNewDatasetRequestPayload) }) + + it('should correctly transform a new dataset model to a new dataset request payload when it contains a license and a datasetType', () => { + const testDataset = createDatasetDTO( + undefined, + undefined, + undefined, + undefined, + undefined, + createDatasetLicenseModel() + ) + const testMetadataBlocks = [createDatasetMetadataBlockModel()] + const datasetType = 'software' + const expectedNewDatasetRequestPayload = createNewDatasetRequestPayload( + createDatasetLicenseModel(), + datasetType + ) + const actual = transformDatasetModelToNewDatasetRequestPayload( + testDataset, + testMetadataBlocks, + datasetType + ) + + expect(actual).toEqual(expectedNewDatasetRequestPayload) + }) }) diff --git a/test/unit/externalTools/GetDatasetExternalToolUrl.test.ts b/test/unit/externalTools/GetDatasetExternalToolUrl.test.ts new file mode 100644 index 00000000..587eda01 --- /dev/null +++ b/test/unit/externalTools/GetDatasetExternalToolUrl.test.ts @@ -0,0 +1,29 @@ +import { WriteError } from '../../../src' +import { IExternalToolsRepository } from 
'../../../src/externalTools/domain/repositories/IExternalToolsRepository' +import { GetDatasetExternalToolResolved } from '../../../src/externalTools/domain/useCases/GetDatasetExternalToolResolved' +import { createDatasetExternalToolResolvedModel } from '../../testHelpers/externalTools/externalToolsHelper' + +describe('execute', () => { + test('should return dataset external tool resolved on repository success', async () => { + const testDatasetExternalToolResolved = createDatasetExternalToolResolvedModel() + const externalToolsRepositoryStub: IExternalToolsRepository = {} as IExternalToolsRepository + externalToolsRepositoryStub.getDatasetExternalToolResolved = jest + .fn() + .mockResolvedValue(testDatasetExternalToolResolved) + const sut = new GetDatasetExternalToolResolved(externalToolsRepositoryStub) + + const actual = await sut.execute(123, 3, { preview: true, locale: 'en' }) + + expect(actual).toEqual(testDatasetExternalToolResolved) + }) + + test('should return error result on repository error', async () => { + const externalToolsRepositoryStub: IExternalToolsRepository = {} as IExternalToolsRepository + externalToolsRepositoryStub.getDatasetExternalToolResolved = jest + .fn() + .mockRejectedValue(new WriteError()) + const sut = new GetDatasetExternalToolResolved(externalToolsRepositoryStub) + + await expect(sut.execute(123, 3, { preview: true, locale: 'en' })).rejects.toThrow(WriteError) + }) +}) diff --git a/test/unit/externalTools/GetExternalTools.test.ts b/test/unit/externalTools/GetExternalTools.test.ts new file mode 100644 index 00000000..a0b46762 --- /dev/null +++ b/test/unit/externalTools/GetExternalTools.test.ts @@ -0,0 +1,25 @@ +import { GetExternalTools } from '../../../src/externalTools/domain/useCases/GetExternalTools' +import { IExternalToolsRepository } from '../../../src/externalTools/domain/repositories/IExternalToolsRepository' +import { createExternalToolsModel } from '../../testHelpers/externalTools/externalToolsHelper' +import { 
ReadError } from '../../../src' + +describe('execute', () => { + test('should return external tools list on repository success', async () => { + const testExternalTools = createExternalToolsModel() + const externalToolsRepositoryStub: IExternalToolsRepository = {} as IExternalToolsRepository + externalToolsRepositoryStub.getExternalTools = jest.fn().mockResolvedValue(testExternalTools) + const sut = new GetExternalTools(externalToolsRepositoryStub) + + const actual = await sut.execute() + + expect(actual).toEqual(testExternalTools) + }) + + test('should return error result on repository error', async () => { + const externalToolsRepositoryStub: IExternalToolsRepository = {} as IExternalToolsRepository + externalToolsRepositoryStub.getExternalTools = jest.fn().mockRejectedValue(new ReadError()) + const sut = new GetExternalTools(externalToolsRepositoryStub) + + await expect(sut.execute()).rejects.toThrow(ReadError) + }) +}) diff --git a/test/unit/externalTools/GetFileExternalToolUrl.test.ts b/test/unit/externalTools/GetFileExternalToolUrl.test.ts new file mode 100644 index 00000000..a55cca29 --- /dev/null +++ b/test/unit/externalTools/GetFileExternalToolUrl.test.ts @@ -0,0 +1,29 @@ +import { WriteError } from '../../../src' +import { IExternalToolsRepository } from '../../../src/externalTools/domain/repositories/IExternalToolsRepository' +import { GetFileExternalToolResolved } from '../../../src/externalTools/domain/useCases/GetFileExternalToolResolved' +import { createFileExternalToolResolvedModel } from '../../testHelpers/externalTools/externalToolsHelper' + +describe('execute', () => { + test('should return file external tool resolved on repository success', async () => { + const testFileExternalToolResolved = createFileExternalToolResolvedModel() + const externalToolsRepositoryStub: IExternalToolsRepository = {} as IExternalToolsRepository + externalToolsRepositoryStub.getFileExternalToolResolved = jest + .fn() + .mockResolvedValue(testFileExternalToolResolved) 
+ const sut = new GetFileExternalToolResolved(externalToolsRepositoryStub) + + const actual = await sut.execute(123, 3, { preview: true, locale: 'en' }) + + expect(actual).toEqual(testFileExternalToolResolved) + }) + + test('should return error result on repository error', async () => { + const externalToolsRepositoryStub: IExternalToolsRepository = {} as IExternalToolsRepository + externalToolsRepositoryStub.getFileExternalToolResolved = jest + .fn() + .mockRejectedValue(new WriteError()) + const sut = new GetFileExternalToolResolved(externalToolsRepositoryStub) + + await expect(sut.execute(123, 3, { preview: true, locale: 'en' })).rejects.toThrow(WriteError) + }) +}) diff --git a/test/unit/info/DataverseInfoRepository.test.ts b/test/unit/info/DataverseInfoRepository.test.ts index fd6dd138..61010810 100644 --- a/test/unit/info/DataverseInfoRepository.test.ts +++ b/test/unit/info/DataverseInfoRepository.test.ts @@ -1,6 +1,6 @@ import axios from 'axios' import { DataverseInfoRepository } from '../../../src/info/infra/repositories/DataverseInfoRepository' -import { ApiConfig, ReadError } from '../../../src' +import { ApiConfig, DatasetMetadataExportFormats, ReadError } from '../../../src' import { TestConstants } from '../../testHelpers/TestConstants' import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig' @@ -188,4 +188,56 @@ describe('DataverseInfoRepository', () => { expect(error).toBeInstanceOf(Error) }) }) + + describe('getAvailableDatasetMetadataExportFormats', () => { + test('should return available dataset metadata export formats on successful response', async () => { + const formats: DatasetMetadataExportFormats = { + OAI_ORE: { + displayName: 'OAI_ORE', + mediaType: 'application/json', + isHarvestable: false, + isVisibleInUserInterface: true + }, + Datacite: { + displayName: 'DataCite', + mediaType: 'application/xml', + isHarvestable: true, + isVisibleInUserInterface: true, + XMLNameSpace: 
'http://datacite.org/schema/kernel-4', + XMLSchemaLocation: + 'http://datacite.org/schema/kernel-4 http://schema.datacite.org/meta/kernel-4.5/metadata.xsd', + XMLSchemaVersion: '4.5' + } + } + + const testSuccessfulResponse = { + data: { + status: 'OK', + data: formats + } + } + jest.spyOn(axios, 'get').mockResolvedValue(testSuccessfulResponse) + + const actual = await sut.getAvailableDatasetMetadataExportFormats() + + expect(axios.get).toHaveBeenCalledWith( + `${TestConstants.TEST_API_URL}/info/exportFormats`, + TestConstants.TEST_EXPECTED_UNAUTHENTICATED_REQUEST_CONFIG + ) + expect(actual).toEqual(formats) + }) + + test('should return error result on error response', async () => { + jest.spyOn(axios, 'get').mockRejectedValue(TestConstants.TEST_ERROR_RESPONSE) + + let error: ReadError | undefined + await sut.getAvailableDatasetMetadataExportFormats().catch((e) => (error = e)) + + expect(axios.get).toHaveBeenCalledWith( + `${TestConstants.TEST_API_URL}/info/exportFormats`, + TestConstants.TEST_EXPECTED_UNAUTHENTICATED_REQUEST_CONFIG + ) + expect(error).toBeInstanceOf(Error) + }) + }) }) diff --git a/test/unit/licenses/GetAvailableStandardLicenses.test.ts b/test/unit/licenses/GetAvailableStandardLicenses.test.ts new file mode 100644 index 00000000..4c6857b4 --- /dev/null +++ b/test/unit/licenses/GetAvailableStandardLicenses.test.ts @@ -0,0 +1,62 @@ +import { License, ReadError } from '../../../src' +import { ILicensesRepository } from '../../../src/licenses/domain/repositories/ILicensesRepository' +import { GetAvailableStandardLicenses } from '../../../src/licenses/domain/useCases/GetAvailableStandardLicenses' + +describe('GetAvailableStandardLicenses', () => { + describe('execute', () => { + test('should return licenses array on repository success', async () => { + const licensesRepositoryStub: ILicensesRepository = {} as ILicensesRepository + + const testLicenses: License[] = [ + { + id: 1, + name: 'CC0 1.0', + uri: 
'http://creativecommons.org/publicdomain/zero/1.0', + iconUri: 'https://licensebuttons.net/p/zero/1.0/88x31.png', + active: true, + isDefault: true, + sortOrder: 0, + rightsIdentifier: 'CC0-1.0', + rightsIdentifierScheme: 'SPDX', + schemeUri: 'https://spdx.org/licenses/', + languageCode: 'en' + }, + { + id: 2, + name: 'CC BY 4.0', + uri: 'http://creativecommons.org/licenses/by/4.0', + iconUri: 'https://licensebuttons.net/l/by/4.0/88x31.png', + active: true, + isDefault: false, + sortOrder: 2, + rightsIdentifier: 'CC-BY-4.0', + rightsIdentifierScheme: 'SPDX', + schemeUri: 'https://spdx.org/licenses/', + languageCode: 'en' + } + ] + + licensesRepositoryStub.getAvailableStandardLicenses = jest + .fn() + .mockResolvedValue(testLicenses) + const sut = new GetAvailableStandardLicenses(licensesRepositoryStub) + + const actual = await sut.execute() + + expect(actual).toEqual(testLicenses) + expect(licensesRepositoryStub.getAvailableStandardLicenses).toHaveBeenCalledTimes(1) + }) + + test('should return error result on repository error', async () => { + const licensesRepositoryStub: ILicensesRepository = {} as ILicensesRepository + const expectedError = new ReadError('Failed to fetch licenses') + licensesRepositoryStub.getAvailableStandardLicenses = jest + .fn() + .mockRejectedValue(expectedError) + const sut = new GetAvailableStandardLicenses(licensesRepositoryStub) + + await expect(sut.execute()).rejects.toThrow(ReadError) + expect(licensesRepositoryStub.getAvailableStandardLicenses).toHaveBeenCalledTimes(1) + }) + }) +}) diff --git a/test/unit/notifications/DeleteNotification.test.ts b/test/unit/notifications/DeleteNotification.test.ts new file mode 100644 index 00000000..d568b975 --- /dev/null +++ b/test/unit/notifications/DeleteNotification.test.ts @@ -0,0 +1,49 @@ +import { DeleteNotification } from '../../../src/notifications/domain/useCases/DeleteNotification' +import { INotificationsRepository } from 
'../../../src/notifications/domain/repositories/INotificationsRepository' +import { + Notification, + NotificationType +} from '../../../src/notifications/domain/models/Notification' + +const mockNotifications: Notification[] = [ + { + id: 1, + type: NotificationType.PUBLISHEDDS, + subjectText: 'Test notification', + messageText: 'Test message', + sentTimestamp: '2025-01-01T00:00:00Z', + displayAsRead: false + }, + { + id: 2, + type: NotificationType.ASSIGNROLE, + subjectText: 'Role assignment', + messageText: 'Role assigned', + sentTimestamp: '2025-01-01T00:00:00Z', + displayAsRead: false + } +] + +describe('execute', () => { + test('should delete notification from repository', async () => { + const notificationsRepositoryStub: INotificationsRepository = {} as INotificationsRepository + notificationsRepositoryStub.getAllNotificationsByUser = jest.fn().mockResolvedValue([]) + notificationsRepositoryStub.deleteNotification = jest.fn().mockResolvedValue(mockNotifications) + const sut = new DeleteNotification(notificationsRepositoryStub) + + await sut.execute(123) + + expect(notificationsRepositoryStub.deleteNotification).toHaveBeenCalledWith(123) + }) + + test('should throw error when repository throws error', async () => { + const notificationsRepositoryStub: INotificationsRepository = {} as INotificationsRepository + notificationsRepositoryStub.getAllNotificationsByUser = jest.fn().mockResolvedValue([]) + notificationsRepositoryStub.deleteNotification = jest + .fn() + .mockRejectedValue(new Error('Repository error')) + const sut = new DeleteNotification(notificationsRepositoryStub) + + await expect(sut.execute(123)).rejects.toThrow('Repository error') + }) +}) diff --git a/test/unit/notifications/GetAllNotificationsByUser.test.ts b/test/unit/notifications/GetAllNotificationsByUser.test.ts new file mode 100644 index 00000000..7df67bca --- /dev/null +++ b/test/unit/notifications/GetAllNotificationsByUser.test.ts @@ -0,0 +1,49 @@ +import { GetAllNotificationsByUser } 
from '../../../src/notifications/domain/useCases/GetAllNotificationsByUser' +import { INotificationsRepository } from '../../../src/notifications/domain/repositories/INotificationsRepository' +import { + Notification, + NotificationType +} from '../../../src/notifications/domain/models/Notification' + +const mockNotifications: Notification[] = [ + { + id: 1, + type: NotificationType.PUBLISHEDDS, + subjectText: 'Test notification', + messageText: 'Test message', + sentTimestamp: '2025-01-01T00:00:00Z', + displayAsRead: false + }, + { + id: 2, + type: NotificationType.ASSIGNROLE, + subjectText: 'Role assignment', + messageText: 'Role assigned', + sentTimestamp: '2025-01-01T00:00:00Z', + displayAsRead: false + } +] + +describe('execute', () => { + test('should return notifications from repository', async () => { + const notificationsRepositoryStub: INotificationsRepository = {} as INotificationsRepository + notificationsRepositoryStub.getAllNotificationsByUser = jest + .fn() + .mockResolvedValue(mockNotifications) + const sut = new GetAllNotificationsByUser(notificationsRepositoryStub) + + const result = await sut.execute() + + expect(result).toEqual(mockNotifications) + }) + + test('should throw error when repository throws error', async () => { + const notificationsRepositoryStub: INotificationsRepository = {} as INotificationsRepository + notificationsRepositoryStub.getAllNotificationsByUser = jest + .fn() + .mockRejectedValue(new Error('Repository error')) + const sut = new GetAllNotificationsByUser(notificationsRepositoryStub) + + await expect(sut.execute()).rejects.toThrow('Repository error') + }) +}) diff --git a/test/unit/notifications/GetUnreadCount.test.ts b/test/unit/notifications/GetUnreadCount.test.ts new file mode 100644 index 00000000..7a54f9e9 --- /dev/null +++ b/test/unit/notifications/GetUnreadCount.test.ts @@ -0,0 +1,27 @@ +import { GetUnreadNotificationsCount } from '../../../src/notifications/domain/useCases/GetUnreadNotificationsCount' +import { 
INotificationsRepository } from '../../../src/notifications/domain/repositories/INotificationsRepository' +import { ReadError } from '../../../src' + +describe('GetUnreadNotificationsCount', () => { + test('should return unread count from repository', async () => { + const notificationsRepositoryStub: INotificationsRepository = {} as INotificationsRepository + + notificationsRepositoryStub.getUnreadNotificationsCount = jest.fn().mockResolvedValue(5) + const sut = new GetUnreadNotificationsCount(notificationsRepositoryStub) + + const result = await sut.execute() + + expect(notificationsRepositoryStub.getUnreadNotificationsCount).toHaveBeenCalledWith() + expect(result).toBe(5) + }) + + test('should throw error when repository throws error', async () => { + const notificationsRepositoryStub: INotificationsRepository = {} as INotificationsRepository + notificationsRepositoryStub.getUnreadNotificationsCount = jest + .fn() + .mockRejectedValue(new ReadError()) + const sut = new GetUnreadNotificationsCount(notificationsRepositoryStub) + + await expect(sut.execute()).rejects.toThrow(ReadError) + }) +}) diff --git a/test/unit/notifications/MarkAsRead.test.ts b/test/unit/notifications/MarkAsRead.test.ts new file mode 100644 index 00000000..a1f57a59 --- /dev/null +++ b/test/unit/notifications/MarkAsRead.test.ts @@ -0,0 +1,26 @@ +import { MarkNotificationAsRead } from '../../../src/notifications/domain/useCases/MarkNotificationAsRead' +import { INotificationsRepository } from '../../../src/notifications/domain/repositories/INotificationsRepository' +import { ReadError } from '../../../src' + +describe('MarkNotificationAsRead', () => { + test('should mark notification as read in repository', async () => { + const notificationsRepositoryStub: INotificationsRepository = {} as INotificationsRepository + + notificationsRepositoryStub.markNotificationAsRead = jest.fn().mockResolvedValue(undefined) + const sut = new MarkNotificationAsRead(notificationsRepositoryStub) + + await 
sut.execute(123) + + expect(notificationsRepositoryStub.markNotificationAsRead).toHaveBeenCalledWith(123) + }) + + test('should throw error when repository throws error', async () => { + const notificationsRepositoryStub: INotificationsRepository = {} as INotificationsRepository + notificationsRepositoryStub.markNotificationAsRead = jest + .fn() + .mockRejectedValue(new ReadError()) + const sut = new MarkNotificationAsRead(notificationsRepositoryStub) + + await expect(sut.execute(123)).rejects.toThrow(ReadError) + }) +}) diff --git a/test/unit/search/GetSearchServices.test.ts b/test/unit/search/GetSearchServices.test.ts new file mode 100644 index 00000000..ea3e9a1b --- /dev/null +++ b/test/unit/search/GetSearchServices.test.ts @@ -0,0 +1,25 @@ +import { ReadError } from '../../../src' +import { ISearchServicesRepository } from '../../../src/search/domain/repositories/ISearchServicesRepository' +import { GetSearchServices } from '../../../src/search/domain/useCases/GetSearchServices' +import { createSearchServiceModelArray } from '../../testHelpers/search/searchServiceHelper' + +describe('execute', () => { + test('should return search services array on repository success', async () => { + const searchServicesRepositoryStub: ISearchServicesRepository = {} as ISearchServicesRepository + const testServices = createSearchServiceModelArray(5) + searchServicesRepositoryStub.getSearchServices = jest.fn().mockResolvedValue(testServices) + const sut = new GetSearchServices(searchServicesRepositoryStub) + + const actual = await sut.execute() + + expect(actual).toEqual(testServices) + }) + + test('should return error result on repository error', async () => { + const searchServicesRepositoryStub: ISearchServicesRepository = {} as ISearchServicesRepository + searchServicesRepositoryStub.getSearchServices = jest.fn().mockRejectedValue(new ReadError()) + const sut = new GetSearchServices(searchServicesRepositoryStub) + + await expect(sut.execute()).rejects.toThrow(ReadError) + }) 
+})