From e42ea088e466de6152334e89044e200e6fabd8c5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C3=ABl=20Bitard?= <bitard.michael@gmail.com> Date: Tue, 9 Jul 2024 09:16:22 +0200 Subject: [PATCH] chore(api): migre de fp-ts vers effect (#1309) --- package-lock.json | 12 +- packages/api/package.json | 6 +- .../src/api/rest/administrations.queries.ts | 17 +- packages/api/src/api/rest/administrations.ts | 67 +-- packages/api/src/api/rest/etapes.ts | 29 +- packages/api/src/api/rest/logs.queries.ts | 8 +- .../api/src/api/rest/perimetre.queries.ts | 72 +-- packages/api/src/api/rest/perimetre.ts | 565 ++++++++---------- .../processes/titres-etapes-areas-update.ts | 11 +- packages/api/src/pg-database.ts | 23 +- packages/api/src/server/rest.ts | 51 +- packages/api/src/tools/fp-tools.ts | 37 +- 12 files changed, 415 insertions(+), 483 deletions(-) diff --git a/package-lock.json b/package-lock.json index 7a4d8d0fe..816d93106 100644 --- a/package-lock.json +++ b/package-lock.json @@ -14316,6 +14316,11 @@ "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" }, + "node_modules/effect": { + "version": "3.4.7", + "resolved": "https://registry.npmjs.org/effect/-/effect-3.4.7.tgz", + "integrity": "sha512-WnxvyX4fksdotDHBXcCfZpxFlY77wMaWP/GIRnr7aJZj9U+k+4QRWd1Ee/OU/jlswyC+MlO5YXilGfcz6WN8Lg==" + }, "node_modules/ejs": { "version": "3.1.10", "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.10.tgz", @@ -16532,11 +16537,6 @@ "node": ">= 0.6" } }, - "node_modules/fp-ts": { - "version": "2.16.6", - "resolved": "https://registry.npmjs.org/fp-ts/-/fp-ts-2.16.6.tgz", - "integrity": "sha512-v7w209VPj4L6pPn/ftFRJu31Oa8QagwcVw7BZmLCUWU4AQoc954rX9ogSIahDf67Pg+GjPbkW/Kn9XWnlWJG0g==" - }, "node_modules/frac": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/frac/-/frac-1.1.2.tgz", @@ -33417,10 +33417,10 @@ "cors": "^2.8.5", "decimal.js": "^10.4.3", "dotenv": "^16.4.5", + "effect": "^3.4.7", "express": "^4.19.2", "express-jwt": "^8.4.1", "express-rate-limit": "^7.2.0", - "fp-ts": "^2.16.6", "graphql": "^16.8.1", "graphql-fields": "^2.0.3", "graphql-http": "^1.22.1", diff --git a/packages/api/package.json b/packages/api/package.json index 90b4b78f3..80e2e2a94 100644 --- a/packages/api/package.json +++ b/packages/api/package.json @@ -64,10 +64,10 @@ "cors": "^2.8.5", "decimal.js": "^10.4.3", "dotenv": "^16.4.5", + "effect": "^3.4.7", "express": "^4.19.2", "express-jwt": "^8.4.1", "express-rate-limit": "^7.2.0", - "fp-ts": "^2.16.6", "graphql": "^16.8.1", "graphql-fields": "^2.0.3", "graphql-http": "^1.22.1", @@ -123,8 +123,8 @@ }, "eslintConfig": { "globals": { - "GeoJSON": "readonly" - }, + "GeoJSON": "readonly" + }, "parser": "@typescript-eslint/parser", "parserOptions": { "project": true diff --git a/packages/api/src/api/rest/administrations.queries.ts b/packages/api/src/api/rest/administrations.queries.ts index ac131b1dc..cc71b3f92 100644 --- a/packages/api/src/api/rest/administrations.queries.ts +++ b/packages/api/src/api/rest/administrations.queries.ts @@ -1,6 +1,6 @@ /* eslint-disable no-restricted-syntax */ import { Pool } from 'pg' -import { DbQueryAccessError, Redefine, dbQueryAndValidate, newDbQueryAndValidate } from '../../pg-database.js' +import { DbQueryAccessError, Redefine, dbQueryAndValidate, effectDbQueryAndValidate } from '../../pg-database.js' import { sql } from '@pgtyped/runtime' import { AdministrationId, administrationIdValidator } from 
'camino-common/src/static/administrations.js' import { AdministrationActiviteTypeEmail, administrationActiviteTypeEmailValidator } from 'camino-common/src/administrations.js' @@ -15,10 +15,9 @@ import { AdminUserNotNull, adminUserNotNullValidator } from 'camino-common/src/r import { ActivitesTypesId } from 'camino-common/src/static/activitesTypes.js' import { NonEmptyArray } from 'camino-common/src/typescript-tools.js' import { z } from 'zod' -import TE from 'fp-ts/lib/TaskEither.js' import { CaminoError } from 'camino-common/src/zod-tools.js' import { ZodUnparseable } from '../../tools/fp-tools.js' -import { pipe } from 'fp-ts/lib/function.js' +import { Effect, pipe } from 'effect' export const getUtilisateursByAdministrationId = async (pool: Pool, administrationId: AdministrationId): Promise<AdminUserNotNull[]> => { const result = await dbQueryAndValidate(getUtilisateursByAdministrationIdDb, { administrationId }, pool, getUtilisateursByAdministrationIdDbValidator) @@ -63,10 +62,10 @@ export const deleteAdministrationActiviteTypeEmail = ( pool: Pool, administrationId: AdministrationId, administrationActiviteTypeEmail: AdministrationActiviteTypeEmail -): TE.TaskEither<CaminoError<ZodUnparseable | DbQueryAccessError>, boolean> => { +): Effect.Effect<boolean, CaminoError<ZodUnparseable | DbQueryAccessError>> => { return pipe( - newDbQueryAndValidate(deleteAdministrationActiviteTypeEmailDb, { administrationId, ...administrationActiviteTypeEmail }, pool, z.void()), - TE.map(() => true) + effectDbQueryAndValidate(deleteAdministrationActiviteTypeEmailDb, { administrationId, ...administrationActiviteTypeEmail }, pool, z.void()), + Effect.map(() => true) ) } @@ -83,10 +82,10 @@ export const insertAdministrationActiviteTypeEmail = ( pool: Pool, administrationId: AdministrationId, administrationActiviteTypeEmail: AdministrationActiviteTypeEmail -): TE.TaskEither<CaminoError<ZodUnparseable | DbQueryAccessError>, boolean> => { +): Effect.Effect<boolean, CaminoError<ZodUnparseable | DbQueryAccessError>> => { return pipe( - newDbQueryAndValidate(insertAdministrationActiviteTypeEmailDb, { administrationId, ...administrationActiviteTypeEmail }, pool, z.void()), - TE.map(() => true) + effectDbQueryAndValidate(insertAdministrationActiviteTypeEmailDb, { administrationId, ...administrationActiviteTypeEmail }, pool, z.void()), + Effect.map(() => true) ) } diff --git a/packages/api/src/api/rest/administrations.ts b/packages/api/src/api/rest/administrations.ts index 3ac6ec07b..ce83fb935 100644 --- a/packages/api/src/api/rest/administrations.ts +++ b/packages/api/src/api/rest/administrations.ts @@ -1,7 +1,6 @@ import { Request as JWTRequest } from 'express-jwt' import { HTTP_STATUS } from 'camino-common/src/http.js' -import TE from 'fp-ts/lib/TaskEither.js' import { CustomResponse } from './express-type.js' import { AdminUserNotNull, User, UserNotNull } from 'camino-common/src/roles.js' import { Pool } from 'pg' @@ -10,10 +9,10 @@ import { canReadAdministrations } from 'camino-common/src/permissions/administra import { deleteAdministrationActiviteTypeEmail, getActiviteTypeEmailsByAdministrationId, getUtilisateursByAdministrationId, insertAdministrationActiviteTypeEmail } from './administrations.queries.js' import { AdministrationActiviteTypeEmail } from 'camino-common/src/administrations.js' import { CaminoApiError } from '../../types.js' -import { pipe } from 'fp-ts/lib/function.js' import { ZodUnparseable } from '../../tools/fp-tools.js' -import { DeepReadonly, exhaustiveCheck } from 
'camino-common/src/typescript-tools.js' +import { DeepReadonly } from 'camino-common/src/typescript-tools.js' import { DbQueryAccessError } from '../../pg-database.js' +import { Effect, Match } from 'effect' export const getAdministrationUtilisateurs = (pool: Pool) => async (req: JWTRequest<User>, res: CustomResponse<AdminUserNotNull[]>) => { const user = req.auth @@ -62,28 +61,21 @@ export const addAdministrationActiviteTypeEmails = ( user: DeepReadonly<UserNotNull>, body: DeepReadonly<AdministrationActiviteTypeEmail>, params: { administrationId: AdministrationId } -): TE.TaskEither<CaminoApiError<'Accès interdit' | ZodUnparseable | DbQueryAccessError>, boolean> => { - return pipe( - TE.Do, - TE.filterOrElseW( +): Effect.Effect<boolean, CaminoApiError<'Accès interdit' | ZodUnparseable | DbQueryAccessError>> => { + return Effect.Do.pipe( + Effect.filterOrFail( () => canReadAdministrations(user), () => ({ message: 'Accès interdit' as const }) ), - TE.flatMap(() => insertAdministrationActiviteTypeEmail(pool, params.administrationId, body)), - TE.mapLeft(caminoError => { - const message = caminoError.message - switch (message) { - case 'Accès interdit': - return { ...caminoError, status: HTTP_STATUS.HTTP_STATUS_FORBIDDEN } - case "Impossible d'accéder à la base de données": - return { ...caminoError, status: HTTP_STATUS.HTTP_STATUS_INTERNAL_SERVER_ERROR } - case 'Problème de validation de données': - return { ...caminoError, status: HTTP_STATUS.HTTP_STATUS_BAD_REQUEST } - default: - exhaustiveCheck(message) - throw new Error('impossible') - } - }) + Effect.flatMap(() => insertAdministrationActiviteTypeEmail(pool, params.administrationId, body)), + Effect.mapError(caminoError => + Match.value(caminoError.message).pipe( + Match.when('Accès interdit', () => ({ ...caminoError, status: HTTP_STATUS.HTTP_STATUS_FORBIDDEN })), + Match.when('Problème de validation de données', () => ({ ...caminoError, status: HTTP_STATUS.HTTP_STATUS_BAD_REQUEST })), + Match.when("Impossible d'accéder à la base de données", () => ({ ...caminoError, status: HTTP_STATUS.HTTP_STATUS_INTERNAL_SERVER_ERROR })), + Match.exhaustive + ) + ) ) } @@ -92,27 +84,20 @@ export const deleteAdministrationActiviteTypeEmails = ( user: DeepReadonly<UserNotNull>, body: DeepReadonly<AdministrationActiviteTypeEmail>, params: { administrationId: AdministrationId } -): TE.TaskEither<CaminoApiError<'Accès interdit' | ZodUnparseable | DbQueryAccessError>, boolean> => { - return pipe( - TE.Do, - TE.filterOrElseW( +): Effect.Effect<boolean, CaminoApiError<'Accès interdit' | ZodUnparseable | DbQueryAccessError>> => { + return Effect.Do.pipe( + Effect.filterOrFail( () => canReadAdministrations(user), () => ({ message: 'Accès interdit' as const }) ), - TE.flatMap(() => deleteAdministrationActiviteTypeEmail(pool, params.administrationId, body)), - TE.mapLeft(caminoError => { - const message = caminoError.message - switch (message) { - case 'Accès interdit': - return { ...caminoError, status: HTTP_STATUS.HTTP_STATUS_FORBIDDEN } - case "Impossible d'accéder à la base de données": - return { ...caminoError, status: HTTP_STATUS.HTTP_STATUS_INTERNAL_SERVER_ERROR } - case 'Problème de validation de données': - return { ...caminoError, status: HTTP_STATUS.HTTP_STATUS_BAD_REQUEST } - default: - exhaustiveCheck(message) - throw new Error('impossible') - } - }) + Effect.flatMap(() => deleteAdministrationActiviteTypeEmail(pool, params.administrationId, body)), + Effect.mapError(caminoError => + Match.value(caminoError.message).pipe( + Match.when('Accès 
interdit', () => ({ ...caminoError, status: HTTP_STATUS.HTTP_STATUS_FORBIDDEN })), + Match.when('Problème de validation de données', () => ({ ...caminoError, status: HTTP_STATUS.HTTP_STATUS_BAD_REQUEST })), + Match.when("Impossible d'accéder à la base de données", () => ({ ...caminoError, status: HTTP_STATUS.HTTP_STATUS_INTERNAL_SERVER_ERROR })), + Match.exhaustive + ) + ) ) } diff --git a/packages/api/src/api/rest/etapes.ts b/packages/api/src/api/rest/etapes.ts index 5ed1aa081..da733519c 100644 --- a/packages/api/src/api/rest/etapes.ts +++ b/packages/api/src/api/rest/etapes.ts @@ -77,7 +77,7 @@ import { FeatureMultiPolygon, FeatureCollectionPoints } from 'camino-common/src/ import { canHaveForages } from 'camino-common/src/permissions/titres.js' import { SecteursMaritimes, getSecteurMaritime } from 'camino-common/src/static/facades.js' import { GEO_SYSTEME_IDS } from 'camino-common/src/static/geoSystemes.js' -import { isRight } from 'fp-ts/lib/Either.js' +import { callAndExit } from '../../tools/fp-tools.js' export const getEtapeEntrepriseDocuments = (pool: Pool) => @@ -382,12 +382,9 @@ const getForagesProperties = async ( pool: Pool ): Promise<Pick<GraphqlEtape, 'geojson4326Forages' | 'geojsonOrigineForages'>> => { if (canHaveForages(titreTypeId) && isNotNullNorUndefined(geojsonOrigineForages) && isNotNullNorUndefined(geojsonOrigineGeoSystemeId)) { - const conversion = await convertPoints(pool, geojsonOrigineGeoSystemeId, GEO_SYSTEME_IDS.WGS84, geojsonOrigineForages)() - if (isRight(conversion)) { - return { geojson4326Forages: conversion.right, geojsonOrigineForages } - } else { - throw new Error(conversion.left.message) - } + return callAndExit(convertPoints(pool, geojsonOrigineGeoSystemeId, GEO_SYSTEME_IDS.WGS84, geojsonOrigineForages), async value => { + return { geojson4326Forages: value, geojsonOrigineForages } + }) } return { @@ -416,9 +413,8 @@ const getPerimetreInfosInternal = async ( throw new Error(`les points doivent être sur le périmètre`) } } - const result = await getGeojsonInformation(pool, geojson4326Perimetre.geometry)() - if (isRight(result)) { - const { communes, sdom, surface, forets, secteurs } = result.right + + return callAndExit(getGeojsonInformation(pool, geojson4326Perimetre.geometry), async ({ communes, sdom, surface, forets, secteurs }) => { const { geojson4326Forages } = await getForagesProperties(titreTypeId, geojsonOrigineGeoSystemeId, geojsonOrigineForages, pool) return { @@ -430,9 +426,7 @@ const getPerimetreInfosInternal = async ( geojson4326Forages, geojsonOrigineForages, } - } else { - throw new Error(result.left.message) - } + }) } else { return { communes: [], @@ -889,15 +883,10 @@ export const deposeEtape = (pool: Pool) => async (req: CaminoRequest, res: Custo const sdomZones: SDOMZoneId[] = [] const communes: CommuneId[] = [] if (isNotNullNorUndefined(titreEtape.geojson4326Perimetre)) { - const result = await getGeojsonInformation(pool, titreEtape.geojson4326Perimetre.geometry)() - if (isRight(result)) { - const { sdom, communes: communeFromGeoJson } = result.right - + await callAndExit(getGeojsonInformation(pool, titreEtape.geojson4326Perimetre.geometry), async ({ sdom, communes: communeFromGeoJson }) => { communes.push(...communeFromGeoJson.map(({ id }) => id)) sdomZones.push(...sdom) - } else { - throw new Error(result.left.message) - } + }) } const titreTypeId = memoize(() => Promise.resolve(titre.typeId)) const administrationsLocales = memoize(() => Promise.resolve(titre.administrationsLocales ?? 
[])) diff --git a/packages/api/src/api/rest/logs.queries.ts b/packages/api/src/api/rest/logs.queries.ts index 817843b9b..6bd475c3c 100644 --- a/packages/api/src/api/rest/logs.queries.ts +++ b/packages/api/src/api/rest/logs.queries.ts @@ -1,16 +1,16 @@ /* eslint-disable no-restricted-syntax */ import { sql } from '@pgtyped/runtime' -import { DbQueryAccessError, Redefine, newDbQueryAndValidate } from '../../pg-database.js' +import { DbQueryAccessError, Redefine, effectDbQueryAndValidate } from '../../pg-database.js' import { Pool } from 'pg' import { z } from 'zod' import { IInsertLogInternalQuery } from './logs.queries.types.js' import { UtilisateurId } from 'camino-common/src/roles.js' -import { TaskEither } from 'fp-ts/lib/TaskEither.js' import { CaminoError } from 'camino-common/src/zod-tools.js' import { ZodUnparseable } from '../../tools/fp-tools.js' +import { Effect } from 'effect' -export const addLog = (pool: Pool, utilisateur_id: UtilisateurId, method: string, path: string, body?: unknown): TaskEither<CaminoError<ZodUnparseable | DbQueryAccessError>, void[]> => - newDbQueryAndValidate(insertLogInternal, { utilisateur_id, method, path, body }, pool, z.void()) +export const addLog = (pool: Pool, utilisateur_id: UtilisateurId, method: string, path: string, body?: unknown): Effect.Effect<void[], CaminoError<ZodUnparseable | DbQueryAccessError>> => + effectDbQueryAndValidate(insertLogInternal, { utilisateur_id, method, path, body }, pool, z.void()) const insertLogInternal = sql< Redefine< diff --git a/packages/api/src/api/rest/perimetre.queries.ts b/packages/api/src/api/rest/perimetre.queries.ts index d6660275c..aa61696cb 100644 --- a/packages/api/src/api/rest/perimetre.queries.ts +++ b/packages/api/src/api/rest/perimetre.queries.ts @@ -1,9 +1,7 @@ /* eslint-disable no-restricted-syntax */ import { sql } from '@pgtyped/runtime' -import { Redefine, newDbQueryAndValidate, DbQueryAccessError } from '../../pg-database.js' +import { Redefine, DbQueryAccessError, effectDbQueryAndValidate } from '../../pg-database.js' import { z } from 'zod' -import TE from 'fp-ts/lib/TaskEither.js' -import E from 'fp-ts/lib/Either.js' import { Pool } from 'pg' import { GeoSystemeId } from 'camino-common/src/static/geoSystemes.js' import { FeatureMultiPolygon, GenericFeatureCollection, MultiPoint, MultiPolygon, featureMultiPolygonValidator, multiPointsValidator, multiPolygonValidator } from 'camino-common/src/perimetre.js' @@ -17,9 +15,9 @@ import { foretIdValidator } from 'camino-common/src/static/forets.js' import { sdomZoneIdValidator } from 'camino-common/src/static/sdom.js' import { KM2, M2, createM2Validator, km2Validator, m2Validator } from 'camino-common/src/number.js' import { DeepReadonly, isNullOrUndefined } from 'camino-common/src/typescript-tools.js' -import { pipe } from 'fp-ts/lib/function.js' -import { ZodUnparseable, zodParseTaskEither, zodParseTaskEitherCallback } from '../../tools/fp-tools.js' +import { ZodUnparseable, zodParseEffect, zodParseEffectCallback } from '../../tools/fp-tools.js' import { CaminoError } from 'camino-common/src/zod-tools.js' +import { Effect, pipe } from 'effect' const convertPointsStringifyError = 'Impossible de transformer la feature collection' as const const convertPointsConversionError = 'La liste des points est vide' as const @@ -30,33 +28,31 @@ export const convertPoints = <T extends z.ZodTypeAny>( fromGeoSystemeId: GeoSystemeId, toGeoSystemeId: GeoSystemeId, geojsonPoints: GenericFeatureCollection<T> -): TE.TaskEither<CaminoError<ConvertPointsErrors>, 
GenericFeatureCollection<T>> => { +): Effect.Effect<GenericFeatureCollection<T>, CaminoError<ConvertPointsErrors>> => { if (fromGeoSystemeId === toGeoSystemeId) { - return TE.right(geojsonPoints) + return Effect.succeed(geojsonPoints) } const multiPoint: MultiPoint = { type: 'MultiPoint', coordinates: geojsonPoints.features.map(feature => feature.geometry.coordinates) } return pipe( - TE.fromEither( - E.tryCatch( - () => JSON.stringify(multiPoint), - e => ({ message: convertPointsStringifyError, extra: e }) - ) - ), - TE.flatMap(geojson => newDbQueryAndValidate(convertMultiPointDb, { fromGeoSystemeId, toGeoSystemeId, geojson }, pool, z.object({ geojson: multiPointsValidator }))), - TE.flatMap(result => { + Effect.try({ + try: () => JSON.stringify(multiPoint), + catch: e => ({ message: convertPointsStringifyError, extra: e }), + }), + Effect.flatMap(geojson => effectDbQueryAndValidate(convertMultiPointDb, { fromGeoSystemeId, toGeoSystemeId, geojson }, pool, z.object({ geojson: multiPointsValidator }))), + Effect.flatMap(result => { if (result.length === 0) { - return TE.left({ message: convertPointsConversionError }) + return Effect.fail({ message: convertPointsConversionError }) } - return TE.right(result[0].geojson.coordinates) + return Effect.succeed(result[0].geojson.coordinates) }), - TE.filterOrElseW( + Effect.filterOrFail( coordinates => coordinates.length === geojsonPoints.features.length, () => ({ message: convertPointsInvalidNumberOfFeaturesError }) ), - TE.map(coordinates => { + Effect.map(coordinates => { return { type: 'FeatureCollection', features: geojsonPoints.features.map((feature, index) => { @@ -82,24 +78,22 @@ export const getGeojsonByGeoSystemeId = ( fromGeoSystemeId: GeoSystemeId, toGeoSystemeId: GeoSystemeId, geojson: FeatureMultiPolygon -): TE.TaskEither<CaminoError<GetGeojsonByGeoSystemeIdErrorMessages>, FeatureMultiPolygon> => { +): Effect.Effect<FeatureMultiPolygon, CaminoError<GetGeojsonByGeoSystemeIdErrorMessages>> => { return pipe( - TE.fromEither( - E.tryCatch( - () => JSON.stringify(geojson.geometry), - () => ({ message: conversionGeometrieError }) - ) - ), - TE.flatMap(geojson => newDbQueryAndValidate(getGeojsonByGeoSystemeIdDb, { fromGeoSystemeId, toGeoSystemeId, geojson }, pool, getGeojsonByGeoSystemeIdValidator)), - TE.filterOrElseW( + Effect.try({ + try: () => JSON.stringify(geojson.geometry), + catch: () => ({ message: conversionGeometrieError }), + }), + Effect.flatMap(geojson => effectDbQueryAndValidate(getGeojsonByGeoSystemeIdDb, { fromGeoSystemeId, toGeoSystemeId, geojson }, pool, getGeojsonByGeoSystemeIdValidator)), + Effect.filterOrFail( result => result.length === 1, () => ({ message: conversionSystemeError, extra: toGeoSystemeId }) ), - TE.filterOrElseW( + Effect.filterOrFail( result => result[0].is_valid === true, () => ({ message: perimetreInvalideError, extra: { fromGeoSystemeId, geojson } }) ), - TE.map(result => { + Effect.map(result => { if (fromGeoSystemeId === toGeoSystemeId) { return geojson } @@ -111,7 +105,7 @@ export const getGeojsonByGeoSystemeId = ( return feature }), - TE.flatMap(zodParseTaskEitherCallback(featureMultiPolygonValidator)) + Effect.flatMap(zodParseEffectCallback(featureMultiPolygonValidator)) ) } @@ -135,8 +129,8 @@ export const getTitresIntersectionWithGeojson = ( pool: Pool, geojson4326_perimetre: MultiPolygon, titreSlug: TitreSlug -): TE.TaskEither<CaminoError<ZodUnparseable | DbQueryAccessError>, GetTitresIntersectionWithGeojson[]> => { - return newDbQueryAndValidate( +): 
Effect.Effect<GetTitresIntersectionWithGeojson[], CaminoError<ZodUnparseable | DbQueryAccessError>> => { + return effectDbQueryAndValidate( getTitresIntersectionWithGeojsonDb, { titre_slug: titreSlug, @@ -173,16 +167,16 @@ where 1) = $ domaine_id ! ` -const m2ToKm2 = (value: M2): TE.TaskEither<CaminoError<ZodUnparseable>, KM2> => zodParseTaskEither(km2Validator, Number.parseFloat((value / 1_000_000).toFixed(2))) +const m2ToKm2 = (value: M2): Effect.Effect<KM2, CaminoError<ZodUnparseable>> => zodParseEffect(km2Validator, Number.parseFloat((value / 1_000_000).toFixed(2))) const requestError = 'Une erreur inattendue est survenue lors de la récupération des informations geojson en base' as const export type GetGeojsonInformationErrorMessages = ZodUnparseable | DbQueryAccessError | typeof requestError -export const getGeojsonInformation = (pool: Pool, geojson4326_perimetre: MultiPolygon): TE.TaskEither<CaminoError<GetGeojsonInformationErrorMessages>, GetGeojsonInformation> => { +export const getGeojsonInformation = (pool: Pool, geojson4326_perimetre: MultiPolygon): Effect.Effect<GetGeojsonInformation, CaminoError<GetGeojsonInformationErrorMessages>> => { return pipe( - newDbQueryAndValidate(getGeojsonInformationDb, { geojson4326_perimetre }, pool, getGeojsonInformationDbValidator), - TE.bindW('response', result => (result.length === 1 ? TE.right(result[0]) : TE.left({ message: requestError }))), - TE.bindW('surface', result => m2ToKm2(result.response.surface)), - TE.map(({ response, surface }) => { + effectDbQueryAndValidate(getGeojsonInformationDb, { geojson4326_perimetre }, pool, getGeojsonInformationDbValidator), + Effect.bind('response', result => (result.length === 1 ? Effect.succeed(result[0]) : Effect.fail({ message: requestError }))), + Effect.bind('surface', result => m2ToKm2(result.response.surface)), + Effect.map(({ response, surface }) => { return { ...response, surface } }) ) diff --git a/packages/api/src/api/rest/perimetre.ts b/packages/api/src/api/rest/perimetre.ts index 46a5a97af..660a053b6 100644 --- a/packages/api/src/api/rest/perimetre.ts +++ b/packages/api/src/api/rest/perimetre.ts @@ -1,8 +1,7 @@ import { DemarcheId, demarcheIdOrSlugValidator } from 'camino-common/src/demarche.js' import { CaminoRequest, CustomResponse } from './express-type.js' import { Pool } from 'pg' -import TE from 'fp-ts/lib/TaskEither.js' -import { pipe } from 'fp-ts/lib/function.js' +import { pipe, Effect, Match } from 'effect' import { GEO_SYSTEME_IDS, GeoSystemeId, GeoSystemes } from 'camino-common/src/static/geoSystemes.js' import { HTTP_STATUS } from 'camino-common/src/http.js' import { @@ -45,7 +44,7 @@ import { import { join } from 'node:path' import { readFileSync } from 'node:fs' import shpjs from 'shpjs' -import { DeepReadonly, exhaustiveCheck, isNotNullNorUndefined, isNullOrUndefined, memoize } from 'camino-common/src/typescript-tools.js' +import { DeepReadonly, isNotNullNorUndefined, isNullOrUndefined, memoize } from 'camino-common/src/typescript-tools.js' import { SDOMZoneId } from 'camino-common/src/static/sdom.js' import { TitreSlug } from 'camino-common/src/validators/titres.js' import { canReadEtape } from './permissions/etapes.js' @@ -55,8 +54,7 @@ import { ZodTypeAny, z } from 'zod' import { CommuneId } from 'camino-common/src/static/communes' import { CaminoApiError } from '../../types.js' import { DbQueryAccessError } from '../../pg-database.js' -import E, { isRight } from 'fp-ts/lib/Either.js' -import { ZodUnparseable, zodParseEither, zodParseEitherCallback } from 
'../../tools/fp-tools.js' +import { ZodUnparseable, callAndExit, zodParseEffect, zodParseEffectCallback } from '../../tools/fp-tools.js' import { CaminoError } from 'camino-common/src/zod-tools.js' export const getPerimetreInfos = (pool: Pool) => async (req: CaminoRequest, res: CustomResponse<PerimetreInformations>) => { @@ -118,22 +116,19 @@ export const getPerimetreInfos = (pool: Pool) => async (req: CaminoRequest, res: { ...demarche, titre_public_lecture: titre.public_lecture } ) ) { - const superpositionAlertes = await getAlertesSuperposition(etape.geojson4326_perimetre, titre.titre_type_id, titre.titre_slug, user, pool)() - if (isRight(superpositionAlertes)) { + await callAndExit(getAlertesSuperposition(etape.geojson4326_perimetre, titre.titre_type_id, titre.titre_slug, user, pool), async superpositionAlertes => { res.json({ - superposition_alertes: superpositionAlertes.right, + superposition_alertes: superpositionAlertes, sdomZoneIds: etape.sdom_zones, communes: etape.communes, }) - } else { - res.status(HTTP_STATUS.HTTP_STATUS_INTERNAL_SERVER_ERROR).send(superpositionAlertes.left) - } + }) } else { res.sendStatus(HTTP_STATUS.HTTP_STATUS_FORBIDDEN) } } } catch (e) { - res.sendStatus(HTTP_STATUS.HTTP_STATUS_INTERNAL_SERVER_ERROR) + res.status(HTTP_STATUS.HTTP_STATUS_INTERNAL_SERVER_ERROR).send(e) console.error(e) } } @@ -203,165 +198,153 @@ export const geojsonImport = ( user: DeepReadonly<UserNotNull>, body: DeepReadonly<GeojsonImportBody>, params: { geoSystemeId: GeoSystemeId } -): TE.TaskEither<CaminoApiError<GeojsonImportErrorMessages>, DeepReadonly<GeojsonInformations>> => { +): Effect.Effect<DeepReadonly<GeojsonInformations>, CaminoApiError<GeojsonImportErrorMessages>> => { const pathFrom = join(process.cwd(), `/files/tmp/${body.tempDocumentName}`) return pipe( - TE.fromEither( - pipe( - E.Do, - E.flatMap< - Record<string, never>, - CaminoError<GeojsonImportErrorMessages>, - { geojsonOriginFeatureMultiPolygon: FeatureMultiPolygon; geojsonOriginFeatureCollectionPoints: null | FeatureCollectionPoints } - >(() => { - const fileType = body.fileType - switch (fileType) { - case 'geojson': { - return pipe( - fileNameToJson(pathFrom, geojsonValidator), - E.flatMap(features => - E.tryCatch( - () => { - const firstGeometry = features.features[0].geometry - const multiPolygon: MultiPolygon = firstGeometry.type === 'Polygon' ? 
{ type: 'MultiPolygon', coordinates: [firstGeometry.coordinates] } : firstGeometry - - const geojsonOriginFeatureMultiPolygon: FeatureMultiPolygon = { type: 'Feature', properties: {}, geometry: multiPolygon } - - let geojsonOriginFeatureCollectionPoints: null | FeatureCollectionPoints = null - // On a des points après le multipolygone - if (features.features.length > 1) { - const [_multi, ...points] = features.features - geojsonOriginFeatureCollectionPoints = { type: 'FeatureCollection', features: points } - } - - return { geojsonOriginFeatureMultiPolygon, geojsonOriginFeatureCollectionPoints } - }, - e => ({ message: extractionGeoJSONError, extra: e }) - ) - ) + Effect.Do.pipe( + Effect.flatMap< + Record<string, never>, + { geojsonOriginFeatureMultiPolygon: FeatureMultiPolygon; geojsonOriginFeatureCollectionPoints: null | FeatureCollectionPoints }, + CaminoError<GeojsonImportErrorMessages>, + never + >(() => { + return Match.value(body.fileType).pipe( + Match.when('geojson', () => + pipe( + fileNameToJson(pathFrom, geojsonValidator), + Effect.flatMap(features => + Effect.try({ + try: () => { + const firstGeometry = features.features[0].geometry + const multiPolygon: MultiPolygon = firstGeometry.type === 'Polygon' ? { type: 'MultiPolygon', coordinates: [firstGeometry.coordinates] } : firstGeometry + + const geojsonOriginFeatureMultiPolygon: FeatureMultiPolygon = { type: 'Feature', properties: {}, geometry: multiPolygon } + + let geojsonOriginFeatureCollectionPoints: null | FeatureCollectionPoints = null + // On a des points après le multipolygone + if (features.features.length > 1) { + const [_multi, ...points] = features.features + geojsonOriginFeatureCollectionPoints = { type: 'FeatureCollection', features: points } + } + + return { geojsonOriginFeatureMultiPolygon, geojsonOriginFeatureCollectionPoints } + }, + catch: e => ({ message: extractionGeoJSONError, extra: e }), + }) ) - } - case 'shp': { - return pipe( - fileNameToShape(pathFrom, shapeValidator), - E.flatMap(shapePolygonOrMultipolygons => - E.tryCatch( - () => { - const shapePolygonOrMultipolygon = shapePolygonOrMultipolygons[0] - let coordinates: [number, number][][][] - if (shapePolygonOrMultipolygon.type === 'MultiPolygon') { - coordinates = shapePolygonOrMultipolygon.coordinates - } else { - coordinates = [shapePolygonOrMultipolygon.coordinates] - } - const geojsonOriginFeatureMultiPolygon: FeatureMultiPolygon = { type: 'Feature', geometry: { type: 'MultiPolygon', coordinates }, properties: {} } - - return { geojsonOriginFeatureMultiPolygon, geojsonOriginFeatureCollectionPoints: null } - }, - e => ({ message: extractionShapeError, extra: e }) - ) - ) + ) + ), + Match.when('shp', () => + pipe( + fileNameToShape(pathFrom, shapeValidator), + Effect.flatMap(shapePolygonOrMultipolygons => + Effect.try({ + try: () => { + const shapePolygonOrMultipolygon = shapePolygonOrMultipolygons[0] + let coordinates: [number, number][][][] + if (shapePolygonOrMultipolygon.type === 'MultiPolygon') { + coordinates = shapePolygonOrMultipolygon.coordinates + } else { + coordinates = [shapePolygonOrMultipolygon.coordinates] + } + const geojsonOriginFeatureMultiPolygon: FeatureMultiPolygon = { type: 'Feature', geometry: { type: 'MultiPolygon', coordinates }, properties: {} } + + return { geojsonOriginFeatureMultiPolygon, geojsonOriginFeatureCollectionPoints: null } + }, + catch: e => ({ message: extractionShapeError, extra: e }), + }) ) - } - case 'csv': { - return pipe( - fileNameToCsv(pathFrom), - E.filterOrElseW( - converted => converted.length 
<= 20, - () => ({ message: importCsvRestrictionError }) - ), - E.flatMap(converted => { - const uniteId = GeoSystemes[params.geoSystemeId].uniteId - let myPipe - switch (uniteId) { - case 'met': { - myPipe = pipe( - zodParseEither(csvXYValidator, converted), - E.flatMap(rows => - E.tryCatch( - () => { - const coordinates: MultiPolygon['coordinates'] = [[rows.map(({ x, y }) => [x, y])]] - const points: FeatureCollectionPoints['features'] = rows.map(ligne => ({ - type: 'Feature', - properties: { nom: ligne.nom, description: ligne.description }, - geometry: { type: 'Point', coordinates: [ligne.x, ligne.y] }, - })) - - coordinates[0][0].push([rows[0].x, rows[0].y]) - - return { coordinates, points } - }, - e => ({ message: recuperationInfoCsvError, extra: e }) - ) - ) + ) + ), + Match.when('csv', () => + pipe( + fileNameToCsv(pathFrom), + Effect.filterOrFail( + converted => converted.length <= 20, + () => ({ message: importCsvRestrictionError }) + ), + Effect.flatMap(converted => { + const uniteId = GeoSystemes[params.geoSystemeId].uniteId + const myPipe = Match.value(uniteId).pipe( + Match.when('met', () => + pipe( + zodParseEffect(csvXYValidator, converted), + Effect.flatMap(rows => + Effect.try({ + try: () => { + const coordinates: MultiPolygon['coordinates'] = [[rows.map(({ x, y }) => [x, y])]] + const points: FeatureCollectionPoints['features'] = rows.map(ligne => ({ + type: 'Feature', + properties: { nom: ligne.nom, description: ligne.description }, + geometry: { type: 'Point', coordinates: [ligne.x, ligne.y] }, + })) + + coordinates[0][0].push([rows[0].x, rows[0].y]) + + return { coordinates, points } + }, + catch: e => ({ message: recuperationInfoCsvError, extra: e }), + }) ) - break - } - case 'gon': - case 'deg': { - myPipe = pipe( - zodParseEither(csvLatLongValidator, converted), - E.flatMap(rows => - E.tryCatch( - () => { - const coordinates: MultiPolygon['coordinates'] = [[rows.map(({ longitude, latitude }) => [longitude, latitude])]] - const points: FeatureCollectionPoints['features'] = rows.map(ligne => ({ - type: 'Feature', - properties: { nom: ligne.nom, description: ligne.description }, - geometry: { type: 'Point', coordinates: [ligne.longitude, ligne.latitude] }, - })) - coordinates[0][0].push([rows[0].longitude, rows[0].latitude]) - - return { coordinates, points } - }, - e => ({ message: recuperationInfoCsvError, extra: e }) - ) - ) + ) + ), + Match.whenOr('gon', 'deg', () => + pipe( + zodParseEffect(csvLatLongValidator, converted), + Effect.flatMap(rows => + Effect.try({ + try: () => { + const coordinates: MultiPolygon['coordinates'] = [[rows.map(({ longitude, latitude }) => [longitude, latitude])]] + const points: FeatureCollectionPoints['features'] = rows.map(ligne => ({ + type: 'Feature', + properties: { nom: ligne.nom, description: ligne.description }, + geometry: { type: 'Point', coordinates: [ligne.longitude, ligne.latitude] }, + })) + coordinates[0][0].push([rows[0].longitude, rows[0].latitude]) + + return { coordinates, points } + }, + catch: e => ({ message: recuperationInfoCsvError, extra: e }), + }) ) - break - } - default: { - exhaustiveCheck(uniteId) - throw new Error('impossible') - } - } + ) + ), + Match.exhaustive + ) - return pipe( - myPipe, - E.flatMap(({ coordinates, points }) => { - const geojsonOriginFeatureMultiPolygon: FeatureMultiPolygon = { type: 'Feature', properties: {}, geometry: { type: 'MultiPolygon', coordinates } } - const geojsonOriginFeatureCollectionPoints: FeatureCollectionPoints = { type: 'FeatureCollection', features: points } + 
return pipe( + myPipe, + Effect.map(({ coordinates, points }) => { + const geojsonOriginFeatureMultiPolygon: FeatureMultiPolygon = { type: 'Feature', properties: {}, geometry: { type: 'MultiPolygon', coordinates } } + const geojsonOriginFeatureCollectionPoints: FeatureCollectionPoints = { type: 'FeatureCollection', features: points } - return E.right({ geojsonOriginFeatureMultiPolygon, geojsonOriginFeatureCollectionPoints }) - }) - ) - }) - ) - } - default: - exhaustiveCheck(fileType) - throw new Error('Impossible') - } - }) - ) + return { geojsonOriginFeatureMultiPolygon, geojsonOriginFeatureCollectionPoints } + }) + ) + }) + ) + ), + Match.exhaustive + ) + }) ), - TE.bindW('geojson4326MultiPolygon', ({ geojsonOriginFeatureMultiPolygon }) => + Effect.bind('geojson4326MultiPolygon', ({ geojsonOriginFeatureMultiPolygon }) => pipe( getGeojsonByGeoSystemeIdQuery(pool, params.geoSystemeId, GEO_SYSTEME_IDS.WGS84, geojsonOriginFeatureMultiPolygon), - TE.map(result => result.geometry) + Effect.map(result => result.geometry) ) ), - TE.bindW('geojson4326FeatureCollectionPoints', ({ geojsonOriginFeatureCollectionPoints }) => { + Effect.bind('geojson4326FeatureCollectionPoints', ({ geojsonOriginFeatureCollectionPoints }) => { if (isNotNullNorUndefined(geojsonOriginFeatureCollectionPoints)) { return convertPoints(pool, params.geoSystemeId, GEO_SYSTEME_IDS.WGS84, geojsonOriginFeatureCollectionPoints) } - return TE.right(null) + return Effect.succeed(null) }), - TE.bindW('geoInfo', ({ geojson4326MultiPolygon }) => getGeojsonInformation(pool, geojson4326MultiPolygon)), - TE.bindW('alertesSuperposition', ({ geojson4326MultiPolygon }) => getAlertesSuperposition(geojson4326MultiPolygon, body.titreTypeId, body.titreSlug, user, pool)), - TE.map(({ geojson4326MultiPolygon, geojson4326FeatureCollectionPoints, geoInfo, alertesSuperposition, geojsonOriginFeatureMultiPolygon, geojsonOriginFeatureCollectionPoints }) => { + Effect.bind('geoInfo', ({ geojson4326MultiPolygon }) => getGeojsonInformation(pool, geojson4326MultiPolygon)), + Effect.bind('alertesSuperposition', ({ geojson4326MultiPolygon }) => getAlertesSuperposition(geojson4326MultiPolygon, body.titreTypeId, body.titreSlug, user, pool)), + Effect.map(({ geojson4326MultiPolygon, geojson4326FeatureCollectionPoints, geoInfo, alertesSuperposition, geojsonOriginFeatureMultiPolygon, geojsonOriginFeatureCollectionPoints }) => { const result: GeojsonInformations = { superposition_alertes: alertesSuperposition, communes: geoInfo.communes, @@ -378,58 +361,59 @@ export const geojsonImport = ( return result }), - TE.mapLeft(caminoError => { - const message = caminoError.message - switch (message) { - case 'Une erreur est survenue lors de la lecture du csv': - case 'Une erreur est survenue lors de la récupération des informations du CSV': - case 'Une erreur inattendue est survenue lors de la récupération des informations geojson en base': - case "Impossible d'accéder à la base de données": - return { ...caminoError, status: HTTP_STATUS.HTTP_STATUS_INTERNAL_SERVER_ERROR } - case 'Problème de validation de données': - case "Une erreur s'est produite lors de l'ouverture du fichier GeoJSON": - case "Une erreur s'est produite lors de l'ouverture du fichier shape": - case 'Impossible de convertir la géométrie en JSON': - case 'Impossible de convertir le geojson vers le système': - case "L'import CSV est fait pour des petits polygones simple de moins de 20 sommets": - case "Le périmètre n'est pas valide dans le référentiel donné": - case "Une erreur s'est produite lors de 
l'extraction du multi-polygone du fichier GeoJSON": - case "Une erreur s'est produite lors de l'extraction du multi-polygone du fichier shape": - case 'Impossible de transformer la feature collection': - case 'La liste des points est vide': - case 'Le nombre de points est invalide': - return { ...caminoError, status: HTTP_STATUS.HTTP_STATUS_BAD_REQUEST } - default: - exhaustiveCheck(message) - throw new Error('impossible') - } - }) + Effect.mapError(caminoError => + Match.value(caminoError.message).pipe( + Match.whenOr( + 'Une erreur est survenue lors de la lecture du csv', + 'Une erreur est survenue lors de la récupération des informations du CSV', + 'Une erreur inattendue est survenue lors de la récupération des informations geojson en base', + "Impossible d'accéder à la base de données", + () => ({ ...caminoError, status: HTTP_STATUS.HTTP_STATUS_INTERNAL_SERVER_ERROR }) + ), + Match.whenOr( + 'Problème de validation de données', + "Une erreur s'est produite lors de l'ouverture du fichier GeoJSON", + "Une erreur s'est produite lors de l'ouverture du fichier shape", + 'Impossible de convertir la géométrie en JSON', + 'Impossible de convertir le geojson vers le système', + "L'import CSV est fait pour des petits polygones simple de moins de 20 sommets", + "Le périmètre n'est pas valide dans le référentiel donné", + "Une erreur s'est produite lors de l'extraction du multi-polygone du fichier GeoJSON", + "Une erreur s'est produite lors de l'extraction du multi-polygone du fichier shape", + 'Impossible de transformer la feature collection', + 'La liste des points est vide', + 'Le nombre de points est invalide', + () => ({ ...caminoError, status: HTTP_STATUS.HTTP_STATUS_BAD_REQUEST }) + ), + Match.exhaustive + ) + ) ) } -const fileNameToJson = <T extends ZodTypeAny>(pathFrom: string, validator: T): E.Either<CaminoError<ZodUnparseable | typeof ouvertureGeoJSONError>, z.infer<T>> => { +const fileNameToJson = <T extends ZodTypeAny>(pathFrom: string, validator: T): Effect.Effect<z.infer<T>, CaminoError<ZodUnparseable | typeof ouvertureGeoJSONError>> => { return pipe( - E.tryCatch( - () => { + Effect.try({ + try: () => { const fileContent = readFileSync(pathFrom) return JSON.parse(fileContent.toString()) }, - () => ({ message: ouvertureGeoJSONError }) - ), - E.flatMap(zodParseEitherCallback(validator)) + catch: () => ({ message: ouvertureGeoJSONError }), + }), + Effect.flatMap(zodParseEffectCallback(validator)) ) } -const fileNameToShape = <T extends ZodTypeAny>(pathFrom: string, validator: T): E.Either<CaminoError<ZodUnparseable | typeof ouvertureShapeError>, z.infer<T>> => { +const fileNameToShape = <T extends ZodTypeAny>(pathFrom: string, validator: T): Effect.Effect<z.infer<T>, CaminoError<ZodUnparseable | typeof ouvertureShapeError>> => { return pipe( - E.tryCatch( - () => { + Effect.try({ + try: () => { const fileContent = readFileSync(pathFrom) return shpjs.parseShp(fileContent) }, - () => ({ message: ouvertureShapeError }) - ), - E.flatMap(zodParseEitherCallback(validator)) + catch: () => ({ message: ouvertureShapeError }), + }), + Effect.flatMap(zodParseEffectCallback(validator)) ) } @@ -443,9 +427,9 @@ const readIsoOrUTF8FileSync = (path: string): string => { } } -const fileNameToCsv = (pathFrom: string): E.Either<CaminoError<typeof ouvertureCsvError>, unknown[]> => { - return E.tryCatch( - () => { +const fileNameToCsv = (pathFrom: string): Effect.Effect<unknown[], CaminoError<typeof ouvertureCsvError>> => { + return Effect.try({ + try: () => { const fileContent = 
readIsoOrUTF8FileSync(pathFrom) const result = xlsx.read(fileContent, { type: 'string', FS: ';', raw: true }) @@ -457,60 +441,56 @@ const fileNameToCsv = (pathFrom: string): E.Either<CaminoError<typeof ouvertureC return xlsx.utils.sheet_to_json(sheet1, { raw: true }) }, - e => ({ message: ouvertureCsvError, extra: e }) - ) + catch: e => ({ message: ouvertureCsvError, extra: e }), + }) } -type GeosjsonImportPointsErrorMessages = ZodUnparseable | DbQueryAccessError | 'Accès interdit' | 'Fichier incorrect' | ConvertPointsErrors +const accesInterditError = 'Accès interdit' as const +type GeosjsonImportPointsErrorMessages = ZodUnparseable | DbQueryAccessError | typeof accesInterditError | 'Fichier incorrect' | ConvertPointsErrors + export const geojsonImportPoints = ( pool: Pool, user: DeepReadonly<UserNotNull>, geojsonImportInput: DeepReadonly<GeojsonImportPointsBody>, params: { geoSystemeId: GeoSystemeId } -): TE.TaskEither<CaminoApiError<GeosjsonImportPointsErrorMessages>, GeojsonImportPointsResponse> => { - return pipe( - TE.Do, - TE.filterOrElseW( +): Effect.Effect<GeojsonImportPointsResponse, CaminoApiError<GeosjsonImportPointsErrorMessages>> => { + return Effect.Do.pipe( + Effect.filterOrFail( () => !isDefault(user), () => ({ message: 'Accès interdit' as const }) ), - TE.bindW('features', () => { - return TE.fromEither( - pipe( - E.tryCatch( - () => { - const filename = geojsonImportInput.tempDocumentName - const pathFrom = join(process.cwd(), `/files/tmp/${filename}`) - const fileContent = readFileSync(pathFrom) - - return JSON.parse(fileContent.toString()) - }, - () => ({ message: 'Fichier incorrect' as const }) - ), - E.flatMap(zodParseEitherCallback(featureCollectionPointsValidator)) - ) + Effect.bind('features', () => { + return pipe( + Effect.try({ + try: () => { + const filename = geojsonImportInput.tempDocumentName + const pathFrom = join(process.cwd(), `/files/tmp/${filename}`) + const fileContent = readFileSync(pathFrom) + + return JSON.parse(fileContent.toString()) + }, + catch: () => ({ message: 'Fichier incorrect' as const }), + }), + Effect.flatMap(zodParseEffectCallback(featureCollectionPointsValidator)) ) }), - TE.bindW('geojson4326points', ({ features }) => convertPoints(pool, params.geoSystemeId, GEO_SYSTEME_IDS.WGS84, features)), - TE.map(result => ({ geojson4326: result.geojson4326points, origin: result.features })), - TE.mapLeft(caminoError => { - const message = caminoError.message - switch (message) { - case 'Accès interdit': - return { ...caminoError, status: HTTP_STATUS.HTTP_STATUS_FORBIDDEN } - case "Impossible d'accéder à la base de données": - return { ...caminoError, status: HTTP_STATUS.HTTP_STATUS_INTERNAL_SERVER_ERROR } - case 'Problème de validation de données': - case 'Fichier incorrect': - case 'Impossible de transformer la feature collection': - case 'La liste des points est vide': - case 'Le nombre de points est invalide': - return { ...caminoError, status: HTTP_STATUS.HTTP_STATUS_BAD_REQUEST } - default: - exhaustiveCheck(message) - throw new Error('impossible') - } - }) + Effect.bind('geojson4326points', ({ features }) => convertPoints(pool, params.geoSystemeId, GEO_SYSTEME_IDS.WGS84, features)), + Effect.map(result => ({ geojson4326: result.geojson4326points, origin: result.features })), + Effect.mapError(caminoError => + Match.value(caminoError.message).pipe( + Match.when('Accès interdit', () => ({ ...caminoError, status: HTTP_STATUS.HTTP_STATUS_FORBIDDEN })), + Match.when("Impossible d'accéder à la base de données", () => ({ ...caminoError, 
status: HTTP_STATUS.HTTP_STATUS_INTERNAL_SERVER_ERROR })), + Match.whenOr( + 'Fichier incorrect', + 'Impossible de transformer la feature collection', + 'La liste des points est vide', + 'Le nombre de points est invalide', + 'Problème de validation de données', + () => ({ ...caminoError, status: HTTP_STATUS.HTTP_STATUS_BAD_REQUEST }) + ), + Match.exhaustive + ) + ) ) } @@ -520,35 +500,28 @@ export const geojsonImportForages = ( _user: DeepReadonly<UserNotNull>, body: DeepReadonly<GeojsonImportForagesBody>, params: { geoSystemeId: GeoSystemeId } -): TE.TaskEither<CaminoApiError<GeosjsonImportForagesErrorMessages>, GeojsonImportForagesResponse> => { +): Effect.Effect<GeojsonImportForagesResponse, CaminoApiError<GeosjsonImportForagesErrorMessages>> => { const filename = body.tempDocumentName const pathFrom = join(process.cwd(), `/files/tmp/${filename}`) const fileType = body.fileType - return pipe( - TE.Do, - TE.bindW<'features', object, CaminoError<GeosjsonImportForagesErrorMessages>, FeatureCollectionForages>('features', () => { - let myPipe: E.Either<CaminoError<GeosjsonImportForagesErrorMessages>, z.infer<typeof featureCollectionForagesValidator>> - switch (fileType) { - case 'geojson': { - myPipe = fileNameToJson(pathFrom, featureCollectionForagesValidator) - break - } - case 'shp': { - myPipe = fileNameToShape(pathFrom, featureCollectionForagesValidator) - break - } - case 'csv': { - myPipe = pipe( + return Effect.Do.pipe( + Effect.bind('features', () => + Match.value(fileType).pipe( + Match.when('geojson', () => fileNameToJson(pathFrom, featureCollectionForagesValidator)), + Match.when('shp', () => fileNameToShape(pathFrom, featureCollectionForagesValidator)), + Match.when('csv', () => + pipe( fileNameToCsv(pathFrom), - E.flatMap(converted => { + Effect.flatMap(converted => { const uniteId = GeoSystemes[params.geoSystemeId].uniteId - switch (uniteId) { - case 'met': { - return pipe( - zodParseEither(csvForageXYValidator, converted), - E.map(rows => { + + return Match.value(uniteId).pipe( + Match.when('met', () => + pipe( + zodParseEffect(csvForageXYValidator, converted), + Effect.map(rows => { const points: FeatureCollectionForages['features'] = rows.map(ligne => ({ type: 'Feature', properties: { nom: ligne.nom, description: ligne.description, profondeur: ligne.profondeur, type: ligne.type }, @@ -558,12 +531,11 @@ export const geojsonImportForages = ( return { type: 'FeatureCollection', features: points } as const }) ) - } - case 'gon': - case 'deg': { - return pipe( - zodParseEither(csvForageDegValidator, converted), - E.map(rows => { + ), + Match.whenOr('gon', 'deg', () => + pipe( + zodParseEffect(csvForageDegValidator, converted), + Effect.map(rows => { const points: FeatureCollectionForages['features'] = rows.map(ligne => ({ type: 'Feature', properties: { nom: ligne.nom, description: ligne.description, profondeur: ligne.profondeur, type: ligne.type }, @@ -573,47 +545,36 @@ export const geojsonImportForages = ( return { type: 'FeatureCollection', features: points } as const }) ) - } - default: - exhaustiveCheck(uniteId) - throw new Error('Cas impossible mais typescript ne voit pas que exhaustiveCheck throw une exception') - } + ), + Match.exhaustive + ) }), - E.flatMap(zodParseEitherCallback(featureCollectionForagesValidator)) + Effect.flatMap(zodParseEffectCallback(featureCollectionForagesValidator)) ) - break - } - - default: { - exhaustiveCheck(fileType) - throw new Error('Cas impossible mais typescript ne voit pas que exhaustiveCheck throw une exception') - } - } - - return 
TE.fromEither(myPipe) - }), - TE.bindW('conversion', ({ features }) => convertPoints(pool, params.geoSystemeId, GEO_SYSTEME_IDS.WGS84, features)), - TE.map(({ conversion, features }) => { + ), + Match.exhaustive + ) + ), + Effect.bind('conversion', ({ features }) => convertPoints(pool, params.geoSystemeId, GEO_SYSTEME_IDS.WGS84, features)), + Effect.map(({ conversion, features }) => { return { geojson4326: conversion, origin: features } }), - TE.mapLeft(caminoError => { - const message = caminoError.message - switch (message) { - case "Impossible d'accéder à la base de données": - return { ...caminoError, status: HTTP_STATUS.HTTP_STATUS_INTERNAL_SERVER_ERROR } - case 'Problème de validation de données': - case 'Une erreur est survenue lors de la lecture du csv': - case "Une erreur s'est produite lors de l'ouverture du fichier GeoJSON": - case "Une erreur s'est produite lors de l'ouverture du fichier shape": - case 'Impossible de transformer la feature collection': - case 'La liste des points est vide': - case 'Le nombre de points est invalide': - return { ...caminoError, status: HTTP_STATUS.HTTP_STATUS_BAD_REQUEST } - default: - exhaustiveCheck(message) - throw new Error('impossible') - } - }) + Effect.mapError(caminoError => + Match.value(caminoError.message).pipe( + Match.when("Impossible d'accéder à la base de données", () => ({ ...caminoError, status: HTTP_STATUS.HTTP_STATUS_INTERNAL_SERVER_ERROR })), + Match.whenOr( + 'Problème de validation de données', + 'Une erreur est survenue lors de la lecture du csv', + "Une erreur s'est produite lors de l'ouverture du fichier GeoJSON", + "Une erreur s'est produite lors de l'ouverture du fichier shape", + 'Impossible de transformer la feature collection', + 'La liste des points est vide', + 'Le nombre de points est invalide', + () => ({ ...caminoError, status: HTTP_STATUS.HTTP_STATUS_BAD_REQUEST }) + ), + Match.exhaustive + ) + ) ) } @@ -623,11 +584,11 @@ const getAlertesSuperposition = ( titreSlug: TitreSlug, user: DeepReadonly<User>, pool: Pool -): TE.TaskEither<CaminoError<ZodUnparseable | DbQueryAccessError>, GetTitresIntersectionWithGeojson[]> => { +): Effect.Effect<GetTitresIntersectionWithGeojson[], CaminoError<ZodUnparseable | DbQueryAccessError>> => { if (titreTypeId === 'axm' && (isSuper(user) || isAdministrationAdmin(user) || isAdministrationEditeur(user)) && geojson4326_perimetre !== null) { // vérifie qu’il n’existe pas de demandes de titres en cours sur ce périmètre return getTitresIntersectionWithGeojson(pool, geojson4326_perimetre, titreSlug) } - return TE.right([]) + return Effect.succeed([]) } diff --git a/packages/api/src/business/processes/titres-etapes-areas-update.ts b/packages/api/src/business/processes/titres-etapes-areas-update.ts index 65bca2336..41a3fa462 100644 --- a/packages/api/src/business/processes/titres-etapes-areas-update.ts +++ b/packages/api/src/business/processes/titres-etapes-areas-update.ts @@ -11,7 +11,7 @@ import { getGeojsonInformation } from '../../api/rest/perimetre.queries.js' import type { Pool } from 'pg' import { SDOMZoneId } from 'camino-common/src/static/sdom.js' import { M2 } from 'camino-common/src/number.js' -import { isRight } from 'fp-ts/lib/Either.js' +import { callAndExit } from '../../tools/fp-tools.js' /** * Met à jour tous les territoires d’une liste d’étapes @@ -41,17 +41,12 @@ export const titresEtapesAreasUpdate = async (pool: Pool, titresEtapesIds?: stri } try { if (isNotNullNorUndefined(titreEtape.geojson4326Perimetre)) { - const result = await getGeojsonInformation(pool, 
titreEtape.geojson4326Perimetre.geometry)() - if (isRight(result)) { - const { forets, sdom, secteurs, communes } = result.right - + await callAndExit(getGeojsonInformation(pool, titreEtape.geojson4326Perimetre.geometry), async ({ forets, sdom, secteurs, communes }) => { await intersectForets(titreEtape, forets) await intersectSdom(titreEtape, sdom) await intersectCommunes(titreEtape, communes) await intersectSecteursMaritime(titreEtape, secteurs) - } else { - throw new Error(result.left.message) - } + }) } } catch (e) { console.error(`Une erreur est survenue lors du traitement de l'étape ${titreEtape.id}`) diff --git a/packages/api/src/pg-database.ts b/packages/api/src/pg-database.ts index 0ddc6c7ce..8dcbf1eb3 100644 --- a/packages/api/src/pg-database.ts +++ b/packages/api/src/pg-database.ts @@ -1,13 +1,11 @@ import { TaggedQuery } from '@pgtyped/runtime' -import TE from 'fp-ts/lib/TaskEither.js' - import type { Pool } from 'pg' import { z } from 'zod' import type { ZodType, ZodTypeDef } from 'zod' -import { pipe } from 'fp-ts/lib/function.js' import { CaminoError } from 'camino-common/src/zod-tools.js' -import { ZodUnparseable, zodParseTaskEitherCallback } from './tools/fp-tools.js' +import { ZodUnparseable, zodParseEffectCallback } from './tools/fp-tools.js' +import { Effect, pipe } from 'effect' export type Redefine<T, P, O> = T extends { params: infer A; result: infer B } ? { inputs: keyof A; outputs: keyof B } extends { inputs: keyof P; outputs: keyof O } ? { inputs: keyof P; outputs: keyof O } extends { inputs: keyof A; outputs: keyof B } @@ -32,17 +30,18 @@ export const dbQueryAndValidate = async <Params, Result, T extends ZodType<Resul } export type DbQueryAccessError = "Impossible d'accéder à la base de données" -export const newDbQueryAndValidate = <Params, Result, T extends ZodType<Result, ZodTypeDef, unknown>>( + +export const effectDbQueryAndValidate = <Params, Result, T extends ZodType<Result, ZodTypeDef, unknown>>( query: TaggedQuery<{ params: Params; result: Result }>, params: Params, pool: Pool, validator: T -): TE.TaskEither<CaminoError<DbQueryAccessError | ZodUnparseable>, Result[]> => { +): Effect.Effect<Result[], CaminoError<DbQueryAccessError | ZodUnparseable>> => { return pipe( - TE.tryCatch( + Effect.tryPromise({ // eslint-disable-next-line no-restricted-syntax - () => query.run(params, pool), - e => { + try: () => query.run(params, pool), + catch: e => { let extra = '' if (typeof e === 'string') { extra = e.toUpperCase() @@ -51,8 +50,8 @@ export const newDbQueryAndValidate = <Params, Result, T extends ZodType<Result, } return { message: "Impossible d'accéder à la base de données" as const, extra } - } - ), - TE.flatMap(zodParseTaskEitherCallback(z.array(validator))) + }, + }), + Effect.flatMap(zodParseEffectCallback(z.array(validator))) ) } diff --git a/packages/api/src/server/rest.ts b/packages/api/src/server/rest.ts index b92f032d3..a2db95e95 100644 --- a/packages/api/src/server/rest.ts +++ b/packages/api/src/server/rest.ts @@ -3,7 +3,6 @@ import { CaminoApiError, Index } from '../types.js' import type { Pool } from 'pg' -import TE from 'fp-ts/lib/TaskEither.js' import express from 'express' import { join } from 'path' import { inspect } from 'node:util' @@ -66,8 +65,8 @@ import { titreDemandeCreer } from '../api/rest/titre-demande.js' import { config } from '../config/index.js' import { addLog } from '../api/rest/logs.queries.js' import { HTTP_STATUS } from 'camino-common/src/http.js' -import { pipe } from 'fp-ts/lib/function.js' -import { zodParseTaskEither } 
from '../tools/fp-tools.js' +import { zodParseEffect } from '../tools/fp-tools.js' +import { Cause, Effect, Exit, pipe } from 'effect' interface IRestResolverResult { nom: string @@ -95,7 +94,7 @@ type RestNewPostCall<Route extends NewPostRestRoutes> = ( user: DeepReadonly<UserNotNull>, body: DeepReadonly<z.infer<CaminoRestRoutesType[Route]['newPost']['input']>>, params: DeepReadonly<z.infer<CaminoRestRoutesType[Route]['params']>> -) => TE.TaskEither<CaminoApiError<string>, DeepReadonly<z.infer<CaminoRestRoutesType[Route]['newPost']['output']>>> +) => Effect.Effect<DeepReadonly<z.infer<CaminoRestRoutesType[Route]['newPost']['output']>>, CaminoApiError<string>> type RestPostCall<Route extends PostRestRoutes> = (pool: Pool) => (req: CaminoRequest, res: CustomResponse<z.infer<CaminoRestRoutesType[Route]['post']['output']>>) => Promise<void> type RestPutCall<Route extends PutRestRoutes> = (pool: Pool) => (req: CaminoRequest, res: CustomResponse<z.infer<CaminoRestRoutesType[Route]['put']['output']>>) => Promise<void> type RestDeleteCall = (pool: Pool) => (req: CaminoRequest, res: CustomResponse<void | Error>) => Promise<void> @@ -209,18 +208,17 @@ export const restWithPool = (dbPool: Pool) => { console.info(`POST ${route}`) rest.post(route, async (req: CaminoRequest, res: express.Response, _next: express.NextFunction) => { try { - const call = pipe( - TE.Do, - TE.bindW('user', () => { + const call = Effect.Do.pipe( + Effect.bind('user', () => { if (isNotNullNorUndefined(req.auth)) { - return TE.right(req.auth as UserNotNull) + return Effect.succeed(req.auth as UserNotNull) } else { - return TE.left({ message: 'Accès interdit', status: HTTP_STATUS.HTTP_STATUS_FORBIDDEN }) + return Effect.fail({ message: 'Accès interdit', status: HTTP_STATUS.HTTP_STATUS_FORBIDDEN }) } }), - TE.bindW('body', () => zodParseTaskEither(maRoute.newPost.input, req.body)), - TE.bindW('params', () => zodParseTaskEither(maRoute.params, req.params)), - TE.mapLeft(caminoError => { + Effect.bind('body', () => zodParseEffect(maRoute.newPost.input, req.body)), + Effect.bind('params', () => zodParseEffect(maRoute.params, req.params)), + Effect.mapError(caminoError => { if (!('status' in caminoError)) { return { ...caminoError, status: HTTP_STATUS.HTTP_STATUS_BAD_REQUEST } } @@ -228,30 +226,37 @@ export const restWithPool = (dbPool: Pool) => { return caminoError }), // TODO 2024-06-26 ici, si on ne met pas le body et params à any, on se retrouve avec une typescript union hell qui fait tout planter - TE.bindW<'result', { body: any; user: UserNotNull; params: any }, CaminoApiError<string>, DeepReadonly<z.infer<(typeof maRoute)['newPost']['output']>>>( + Effect.bind<'result', { body: any; user: UserNotNull; params: any }, DeepReadonly<z.infer<(typeof maRoute)['newPost']['output']>>, CaminoApiError<string>, never>( 'result', ({ user, body, params }) => maRoute.newPostCall(dbPool, user, body, params) ), - TE.bindW('parsedResult', ({ result }) => + Effect.bind('parsedResult', ({ result }) => pipe( - zodParseTaskEither(maRoute.newPost.output, result), - TE.mapLeft(caminoError => ({ ...caminoError, status: HTTP_STATUS.HTTP_STATUS_INTERNAL_SERVER_ERROR })) + zodParseEffect(maRoute.newPost.output, result), + Effect.mapError(caminoError => ({ ...caminoError, status: HTTP_STATUS.HTTP_STATUS_INTERNAL_SERVER_ERROR })) ) ), - TE.mapBoth( - caminoError => { + Effect.mapBoth({ + onFailure: caminoError => { console.warn(`problem with route ${route}: ${caminoError.message}`) res.status(caminoError.status).json(caminoError) }, - ({ parsedResult, 
user }) => { + onSuccess: ({ parsedResult, user }) => { res.json(parsedResult) return addLog(dbPool, user.id, 'post', req.url, req.body) - } - ) + }, + }), + Effect.runPromiseExit ) - await call() + const pipeline = await call + if (Exit.isFailure(pipeline)) { + if (!Cause.isFailType(pipeline.cause)) { + console.error('catching error on newPost route', route, pipeline.cause, req.body) + res.status(HTTP_STATUS.HTTP_STATUS_INTERNAL_SERVER_ERROR).json({ message: "une erreur inattendue s'est produite", extra: pipeline.cause }) + } + } } catch (e) { console.error('catching error on newPost route', route, e, req.body) res.status(HTTP_STATUS.HTTP_STATUS_INTERNAL_SERVER_ERROR).json({ message: "une erreur inattendue s'est produite", extra: e }) @@ -311,7 +316,7 @@ const restCatcherWithMutation = (method: string, expressCall: ExpressRoute, pool res.sendStatus(HTTP_STATUS.HTTP_STATUS_FORBIDDEN) } else { await expressCall(req, res, next) - await addLog(pool, user.id, method, req.url, req.body)() + await pipe(addLog(pool, user.id, method, req.url, req.body), Effect.runPromise) } } catch (e) { console.error('catching error', e) diff --git a/packages/api/src/tools/fp-tools.ts b/packages/api/src/tools/fp-tools.ts index 3db5472f7..0edb00d75 100644 --- a/packages/api/src/tools/fp-tools.ts +++ b/packages/api/src/tools/fp-tools.ts @@ -1,27 +1,32 @@ import { CaminoError, CaminoZodErrorReadableMessage } from 'camino-common/src/zod-tools.js' -import E from 'fp-ts/lib/Either.js' -import TE from 'fp-ts/lib/TaskEither.js' +import { Cause, Effect, Exit, pipe } from 'effect' import { ZodTypeAny } from 'zod' import { fromError } from 'zod-validation-error' export type ZodUnparseable = 'Problème de validation de données' -export const zodParseEitherCallback = - <T extends ZodTypeAny>(validator: T) => - (value: unknown): E.Either<CaminoError<ZodUnparseable>, T['_output']> => - zodParseEither(validator, value) -export const zodParseTaskEitherCallback = +export const zodParseEffectCallback = <T extends ZodTypeAny>(validator: T) => - (value: unknown): TE.TaskEither<CaminoError<ZodUnparseable>, T['_output']> => - zodParseTaskEither(validator, value) + (value: unknown): Effect.Effect<T['_output'], CaminoError<ZodUnparseable>> => + zodParseEffect(validator, value) -export const zodParseEither = <T extends ZodTypeAny>(validator: T, item: unknown): E.Either<CaminoError<ZodUnparseable>, T['_output']> => { - return E.tryCatch( - () => validator.parse(item), - myError => ({ message: 'Problème de validation de données', zodErrorReadableMessage: fromError(myError).toString() as CaminoZodErrorReadableMessage }) - ) +export const zodParseEffect = <T extends ZodTypeAny>(validator: T, item: unknown): Effect.Effect<T['_output'], CaminoError<ZodUnparseable>> => { + return Effect.try({ + try: () => validator.parse(item), + catch: myError => ({ message: 'Problème de validation de données', zodErrorReadableMessage: fromError(myError).toString() as CaminoZodErrorReadableMessage }), + }) } -export const zodParseTaskEither = <T extends ZodTypeAny>(validator: T, item: unknown): TE.TaskEither<CaminoError<ZodUnparseable>, T['_output']> => { - return TE.fromEither(zodParseEither(validator, item)) +export const callAndExit = async <A, T>(toCall: Effect.Effect<A, CaminoError<string>, never>, success: (value: A) => Promise<T>) => { + const pipeline = await pipe(toCall, Effect.runPromiseExit) + + if (Exit.isSuccess(pipeline)) { + return success(pipeline.value) + } else { + if (Cause.isFailType(pipeline.cause)) { + throw new 
Error(pipeline.cause.error.message) + } else { + throw new Error(`Unexpected error ${pipeline.cause}`) + } + } } -- GitLab
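
Note on the migration pattern applied throughout this patch: it is a mechanical mapping from fp-ts to effect ^3.4. TE.TaskEither<E, A> becomes Effect.Effect<A, E> (the success and error channels swap position), TE.right/TE.left become Effect.succeed/Effect.fail, E.tryCatch and TE.tryCatch become Effect.try and Effect.tryPromise, TE.mapLeft becomes Effect.mapError, TE.filterOrElseW becomes Effect.filterOrFail, and exhaustive switch statements become Match.value(...).pipe(Match.when(...), Match.exhaustive). Below is a minimal sketch of the resulting shape, assuming the effect ^3.4 API added in package.json; the fetchAndValidate and rowValidator names are illustrative placeholders, not identifiers from this codebase.

  import { Effect, pipe } from 'effect'
  import { z } from 'zod'

  const rowValidator = z.object({ id: z.string() })
  type Row = z.infer<typeof rowValidator>

  // The success type comes first and the error type second, the reverse of TaskEither<E, A>.
  const fetchAndValidate = (run: () => Promise<unknown>): Effect.Effect<Row, { message: string; extra?: unknown }> =>
    pipe(
      // TE.tryCatch(run, onError) becomes Effect.tryPromise({ try, catch })
      Effect.tryPromise({
        try: run,
        catch: e => ({ message: "Impossible d'accéder à la base de données", extra: e }),
      }),
      // TE.flatMap + E.tryCatch becomes Effect.flatMap + Effect.try
      Effect.flatMap(raw =>
        Effect.try({
          try: () => rowValidator.parse(raw),
          catch: () => ({ message: 'Problème de validation de données' }),
        })
      )
    )

  // Callers run the pipeline explicitly, e.g. await Effect.runPromiseExit(fetchAndValidate(() => query())),
  // then inspect the Exit, as done by callAndExit in fp-tools.ts and in server/rest.ts above.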