diff --git a/apps/hash-ai-worker-ts/README.md b/apps/hash-ai-worker-ts/README.md index 80e18c2df77..c7ca127c15b 100644 --- a/apps/hash-ai-worker-ts/README.md +++ b/apps/hash-ai-worker-ts/README.md @@ -17,6 +17,9 @@ The service uses the following environment variables: - `HASH_VAULT_HOST`: The host address (including protocol) that the Vault server is running on, e.g. `http://127.0.0.1` - `HASH_VAULT_PORT`: The port that the Vault server is running on, e.g. `8200` - `HASH_VAULT_ROOT_TOKEN`: The token to authenticate with the Vault server. +- `GOOGLE_CLOUD_HASH_PROJECT_ID`: The projectId for a Google Cloud Platform project, used in document analysis (Vertex AI and Cloud Storage). Note that this is the Project ID, _not_ the Project Number. +- `GOOGLE_CLOUD_STORAGE_BUCKET`: The name of the Google Cloud Storage bucket to use for document analysis. +- `GOOGLE_APPLICATION_CREDENTIALS`: The path to a configuration file for GCP authentication. Automatically set locally by the `gcloud` CLI, and set manually during the build process. ### Run the worker diff --git a/apps/hash-ai-worker-ts/docker/Dockerfile b/apps/hash-ai-worker-ts/docker/Dockerfile index c4f3a89ff29..62c12f35592 100644 --- a/apps/hash-ai-worker-ts/docker/Dockerfile +++ b/apps/hash-ai-worker-ts/docker/Dockerfile @@ -63,6 +63,17 @@ WORKDIR /usr/local/src/apps/hash-ai-worker-ts ENTRYPOINT [ "yarn", "--cache-folder", "/tmp/yarn-cache", "--global-folder", "/tmp/yarn-global" ] CMD ["start"] +ARG GOOGLE_CLOUD_WORKLOAD_IDENTITY_FEDERATION_CONFIG_JSON +ENV GOOGLE_CLOUD_WORKLOAD_IDENTITY_FEDERATION_CONFIG_JSON=${GOOGLE_CLOUD_WORKLOAD_IDENTITY_FEDERATION_CONFIG_JSON} + +RUN if [ -n "$GOOGLE_CLOUD_WORKLOAD_IDENTITY_FEDERATION_CONFIG_JSON" ]; then \ + echo $GOOGLE_CLOUD_WORKLOAD_IDENTITY_FEDERATION_CONFIG_JSON > /tmp/google_workload_identity_federation_config.json && \ + export GOOGLE_APPLICATION_CREDENTIALS=/tmp/google_workload_identity_federation_config.json && \ + echo "GOOGLE_APPLICATION_CREDENTIALS set from JSON"; \ + else \ + echo "GOOGLE_APPLICATION_CREDENTIALS not set, no GOOGLE_CLOUD_WORKLOAD_IDENTITY_FEDERATION_CONFIG_JSON in environment"; \ + fi + RUN apt-get update && \ apt-get install -y --no-install-recommends curl && \ rm -rf /var/lib/apt/lists/* && \ diff --git a/apps/hash-ai-worker-ts/package.json b/apps/hash-ai-worker-ts/package.json index af0f609e18d..5424c4841b5 100644 --- a/apps/hash-ai-worker-ts/package.json +++ b/apps/hash-ai-worker-ts/package.json @@ -46,6 +46,8 @@ "@apps/hash-graph": "0.0.0-private", "@blockprotocol/graph": "0.4.0-canary.0", "@blockprotocol/type-system": "0.1.2-canary.0", + "@google-cloud/storage": "7.14.0", + "@google-cloud/vertexai": "1.9.0", "@local/advanced-types": "0.0.0-private", "@local/hash-backend-utils": "0.0.0-private", "@local/hash-graph-client": "0.0.0-private", @@ -82,6 +84,7 @@ "openai": "4.68.4", "openai-chat-tokens": "0.2.8", "papaparse": "5.4.1", + "pdf2json": "3.1.4", "puppeteer": "22.15.0", "puppeteer-extra": "3.3.6", "puppeteer-extra-plugin-stealth": "2.11.2", diff --git a/apps/hash-ai-worker-ts/src/activities/flow-activities.ts b/apps/hash-ai-worker-ts/src/activities/flow-activities.ts index 9b0e6533bac..398b9d7f7c5 100644 --- a/apps/hash-ai-worker-ts/src/activities/flow-activities.ts +++ b/apps/hash-ai-worker-ts/src/activities/flow-activities.ts @@ -8,6 +8,7 @@ import { getFileFromUrlAction } from "./flow-activities/get-file-from-url-action import { getWebPageByUrlAction } from "./flow-activities/get-web-page-by-url-action.js"; import { getWebPageSummaryAction } from 
"./flow-activities/get-web-page-summary-action.js"; import { inferEntitiesFromContentAction } from "./flow-activities/infer-entities-from-content-action.js"; +import { inferMetadataFromDocumentAction } from "./flow-activities/infer-metadata-from-document-action.js"; import { persistEntitiesAction } from "./flow-activities/persist-entities-action.js"; import { persistEntityAction } from "./flow-activities/persist-entity-action.js"; import { persistFlowActivity } from "./flow-activities/persist-flow-activity.js"; @@ -28,6 +29,7 @@ export const createFlowActionActivities = ({ getWebPageByUrlAction, processAutomaticBrowsingSettingsAction, inferEntitiesFromContentAction, + inferMetadataFromDocumentAction, persistEntityAction, persistEntitiesAction, getFileFromUrlAction, diff --git a/apps/hash-ai-worker-ts/src/activities/flow-activities/generate-flow-run-name-activity.ts b/apps/hash-ai-worker-ts/src/activities/flow-activities/generate-flow-run-name-activity.ts index 635e803f7bd..243376e6c3b 100644 --- a/apps/hash-ai-worker-ts/src/activities/flow-activities/generate-flow-run-name-activity.ts +++ b/apps/hash-ai-worker-ts/src/activities/flow-activities/generate-flow-run-name-activity.ts @@ -30,11 +30,15 @@ const systemPrompt = ` You are a workflow naming agent. A workflow is an automated process that produces a result of interest. Multiple workflows of the same kind are run with different inputs, and the user requires a unique name for each run, to distinguish it from other runs of the same kind. -The user provides you with a description of the goal of the workflow, or a description of the template and a list of its inputs, and you generate a short name for the run. Provide only the name – don't include any other text. +The user provides you with a description of the goal of the workflow, or a description of the template and a list of its inputs, and you generate a short name for the run. Provide only the name – don't include any other text. If there are no inputs provided, you can generate a name based on the template description alone. The name should be descriptive enough to distinguish it from other runs from the same template, and must always be a single human-readable sentence, with proper grammar and spacing between words. + + Don't include any quotation marks or special characters around the name. Don't include the word 'workflow' in the name – the user already knows it's a workflow. +Don't include UUIDs or other identifiers that aren't natural language words. Omit them, or use a generic human-readable replacement (e.g. 'entity'). 
+ `; const getModelSuggestedFlowRunName = async ( @@ -79,7 +83,11 @@ const getModelSuggestedFlowRunName = async ( return text; }; -const outputKindsToIgnore: PayloadKind[] = ["GoogleSheet", "GoogleAccountId"]; +const outputKindsToIgnore: PayloadKind[] = [ + "GoogleSheet", + "GoogleAccountId", + "EntityId", +]; export const generateFlowRunName = async ( params: PersistFlowActivityParams, @@ -144,13 +152,20 @@ export const generateFlowRunName = async ( !outputKindsToIgnore.includes(output.payload.kind), ); + let workflowDescriptionString = `The workflow template is named ${ + flowDefinition.name + } with a description of ${flowDefinition.description}.`; + + if (inputsOfInterest?.length) { + workflowDescriptionString += ` The inputs to the workflow run to be named: ${inputsOfInterest + .map((input) => JSON.stringify(input)) + .join("\n")}.`; + } else { + workflowDescriptionString += ` The workflow run to be named has no inputs.`; + } + return getModelSuggestedFlowRunName( - `The workflow template is named ${ - flowDefinition.name - } with a description of ${flowDefinition.description}. - The inputs to the workflow run to be named: ${inputsOfInterest - ?.map((input) => JSON.stringify(input)) - .join("\n")}`, + workflowDescriptionString, usageTrackingParams, ); }; diff --git a/apps/hash-ai-worker-ts/src/activities/flow-activities/get-file-from-url-action.ts b/apps/hash-ai-worker-ts/src/activities/flow-activities/get-file-from-url-action.ts index 36f5838c6fe..04719a25f30 100644 --- a/apps/hash-ai-worker-ts/src/activities/flow-activities/get-file-from-url-action.ts +++ b/apps/hash-ai-worker-ts/src/activities/flow-activities/get-file-from-url-action.ts @@ -6,7 +6,7 @@ import { StatusCode } from "@local/status"; import { Context } from "@temporalio/activity"; import { logProgress } from "../shared/log-progress.js"; -import { getFileEntityFromUrl } from "./shared/get-file-entity-from-url.js"; +import { createFileEntityFromUrl } from "./shared/create-file-entity-from-url.js"; import type { FlowActionActivity } from "./types.js"; export const getFileFromUrlAction: FlowActionActivity = async ({ inputs }) => { @@ -19,17 +19,17 @@ export const getFileFromUrlAction: FlowActionActivity = async ({ inputs }) => { actionType: "getFileFromUrl", }); - const getFileEntityFromUrlStatus = await getFileEntityFromUrl({ + const createFileEntityFromUrlStatus = await createFileEntityFromUrl({ entityUuid: null, url: originalUrl, description, displayName, }); - if (getFileEntityFromUrlStatus.status !== "ok") { + if (createFileEntityFromUrlStatus.status !== "ok") { return { code: StatusCode.Internal, - message: getFileEntityFromUrlStatus.message, + message: createFileEntityFromUrlStatus.message, contents: [], }; } @@ -37,7 +37,7 @@ export const getFileFromUrlAction: FlowActionActivity = async ({ inputs }) => { // @todo look for an existing file with the same originalUrl in the graph, and update it if found? 
const operation = "create" as const; - const fileEntity = getFileEntityFromUrlStatus.entity.toJSON(); + const fileEntity = createFileEntityFromUrlStatus.entity.toJSON(); logProgress([ { diff --git a/apps/hash-ai-worker-ts/src/activities/flow-activities/infer-metadata-from-document-action.ts b/apps/hash-ai-worker-ts/src/activities/flow-activities/infer-metadata-from-document-action.ts new file mode 100644 index 00000000000..28b445bf670 --- /dev/null +++ b/apps/hash-ai-worker-ts/src/activities/flow-activities/infer-metadata-from-document-action.ts @@ -0,0 +1,346 @@ +import { createWriteStream } from "node:fs"; +import { mkdir, unlink } from "node:fs/promises"; +import { dirname, join } from "node:path"; +import { Readable } from "node:stream"; +import { finished } from "node:stream/promises"; +import type { ReadableStream } from "node:stream/web"; +import { fileURLToPath } from "node:url"; + +import { getAwsS3Config } from "@local/hash-backend-utils/aws-config"; +import { AwsS3StorageProvider } from "@local/hash-backend-utils/file-storage/aws-s3-storage-provider"; +import type { + OriginProvenance, + PropertyProvenance, + SourceProvenance, +} from "@local/hash-graph-client"; +import type { + EnforcedEntityEditionProvenance, + Entity, +} from "@local/hash-graph-sdk/entity"; +import { + getSimplifiedActionInputs, + type OutputNameForAction, +} from "@local/hash-isomorphic-utils/flows/action-definitions"; +import type { PersistedEntity } from "@local/hash-isomorphic-utils/flows/types"; +import { generateUuid } from "@local/hash-isomorphic-utils/generate-uuid"; +import { + blockProtocolPropertyTypes, + systemEntityTypes, + systemPropertyTypes, +} from "@local/hash-isomorphic-utils/ontology-type-ids"; +import type { File } from "@local/hash-isomorphic-utils/system-types/shared"; +import { extractEntityUuidFromEntityId } from "@local/hash-subgraph"; +import { StatusCode } from "@local/status"; +import { Context } from "@temporalio/activity"; +import type { Output } from "pdf2json"; +import PDFParser from "pdf2json"; + +import { getAiAssistantAccountIdActivity } from "../get-ai-assistant-account-id-activity.js"; +import { createInferredEntityNotification } from "../shared/create-inferred-entity-notification.js"; +import { getEntityByFilter } from "../shared/get-entity-by-filter.js"; +import { getFlowContext } from "../shared/get-flow-context.js"; +import { graphApiClient } from "../shared/graph-api-client.js"; +import { logProgress } from "../shared/log-progress.js"; +import { generateDocumentPropertyPatches } from "./infer-metadata-from-document-action/generate-property-patches.js"; +import { generateDocumentProposedEntitiesAndCreateClaims } from "./infer-metadata-from-document-action/generate-proposed-entities-and-claims.js"; +import { getLlmAnalysisOfDoc } from "./infer-metadata-from-document-action/get-llm-analysis-of-doc.js"; +import type { FlowActionActivity } from "./types.js"; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = dirname(__filename); + +const baseFilePath = join(__dirname, "/var/tmp_files"); + +export const inferMetadataFromDocumentAction: FlowActionActivity = async ({ + inputs, +}) => { + const { + flowEntityId, + stepId, + userAuthentication: { actorId: userActorId }, + webId, + } = await getFlowContext(); + + const { documentEntityId } = getSimplifiedActionInputs({ + inputs, + actionType: "inferMetadataFromDocument", + }); + + const aiAssistantAccountId = await getAiAssistantAccountIdActivity({ + authentication: { actorId: userActorId }, + 
graphApiClient, + grantCreatePermissionForWeb: webId, + }); + + if (!aiAssistantAccountId) { + return { + code: StatusCode.FailedPrecondition, + contents: [], + message: `Could not get AI assistant account for web ${webId}`, + }; + } + + const documentEntity = await getEntityByFilter({ + actorId: aiAssistantAccountId, + includeDrafts: false, + filter: { + all: [ + { + equal: [{ path: ["ownedById"] }, { parameter: webId }], + }, + { + equal: [ + { path: ["uuid"] }, + { parameter: extractEntityUuidFromEntityId(documentEntityId) }, + ], + }, + ], + }, + graphApiClient, + }); + + if (!documentEntity) { + return { + code: StatusCode.NotFound, + contents: [], + message: `Could not find or access document entity with entityId ${documentEntityId}`, + }; + } + + const fileUrl = + documentEntity.properties[ + blockProtocolPropertyTypes.fileUrl.propertyTypeBaseUrl + ]; + + if (!fileUrl) { + return { + code: StatusCode.InvalidArgument, + contents: [], + message: `Document entity with entityId ${documentEntityId} does not have a fileUrl property`, + }; + } + + if (typeof fileUrl !== "string") { + return { + code: StatusCode.InvalidArgument, + contents: [], + message: `Document entity with entityId ${documentEntityId} has a fileUrl property of type '${typeof fileUrl}', expected 'string'`, + }; + } + + const storageKey = + documentEntity.properties[ + systemPropertyTypes.fileStorageKey.propertyTypeBaseUrl + ]; + + if (!storageKey) { + return { + code: StatusCode.InvalidArgument, + contents: [], + message: `Document entity with entityId ${documentEntityId} does not have a fileStorageKey property`, + }; + } + + if (typeof storageKey !== "string") { + return { + code: StatusCode.InvalidArgument, + contents: [], + message: `Document entity with entityId ${documentEntityId} has a fileStorageKey property of type '${typeof storageKey}', expected 'string'`, + }; + } + + await mkdir(baseFilePath, { recursive: true }); + + const filePath = `${baseFilePath}/${generateUuid()}.pdf`; + + const s3Config = getAwsS3Config(); + + const downloadProvider = new AwsS3StorageProvider(s3Config); + + const urlForDownload = await downloadProvider.presignDownload({ + entity: documentEntity as Entity, + expiresInSeconds: 60 * 60, + key: storageKey, + }); + + const fetchFileResponse = await fetch(urlForDownload); + + if (!fetchFileResponse.ok || !fetchFileResponse.body) { + return { + code: StatusCode.NotFound, + contents: [], + message: `Document entity with entityId ${documentEntityId} has a fileUrl ${fileUrl} that could not be fetched: ${fetchFileResponse.statusText}`, + }; + } + + try { + const fileStream = createWriteStream(filePath); + await finished( + Readable.fromWeb( + fetchFileResponse.body as ReadableStream, + ).pipe(fileStream), + ); + } catch (error) { + await unlink(filePath); + return { + code: StatusCode.Internal, + contents: [], + message: `Failed to write file to file system: ${(error as Error).message}`, + }; + } + + const pdfParser = new PDFParser(); + + const documentJson = await new Promise((resolve, reject) => { + pdfParser.on("pdfParser_dataError", (errData) => + reject(errData.parserError), + ); + + pdfParser.on("pdfParser_dataReady", (pdfData) => { + resolve(pdfData); + }); + + pdfParser.loadPDF(filePath).catch((err) => reject(err)); + }); + + const numberOfPages = documentJson.Pages.length; + + /** + * @todo H-3620: handle documents exceeding Vertex AI limit of 30MB + */ + + const documentMetadata = await getLlmAnalysisOfDoc({ + fileSystemPath: filePath, + hashFileStorageKey: storageKey, + }); + + await 
unlink(filePath); + + const { + authors, + doi, + doiLink, + isbn, + publishedBy, + publishedInYear, + publicationVenue, + summary, + title, + type, + } = documentMetadata; + + const entityTypeIds = new Set(documentEntity.metadata.entityTypeIds); + + if (type === "AcademicPaper") { + entityTypeIds.add(systemEntityTypes.academicPaper.entityTypeId); + } else if (type === "Book") { + entityTypeIds.add(systemEntityTypes.book.entityTypeId); + } else { + entityTypeIds.add(systemEntityTypes.doc.entityTypeId); + } + + const sourceProvenance: SourceProvenance = { + type: "document", + authors: (authors ?? []).map((author) => author.name), + entityId: documentEntityId, + location: { uri: fileUrl }, + }; + + const provenance: EnforcedEntityEditionProvenance = { + actorType: "ai", + origin: { + type: "flow", + id: flowEntityId, + stepIds: [stepId], + } satisfies OriginProvenance, + sources: [sourceProvenance], + }; + + const propertyProvenance: PropertyProvenance = { + sources: [sourceProvenance], + }; + + const propertyPatches = generateDocumentPropertyPatches( + { + doi, + doiLink, + isbn, + numberOfPages, + publishedInYear, + summary, + title, + type, + }, + propertyProvenance, + ); + + const existingEntity = documentEntity.toJSON(); + + const updatedEntity = await documentEntity.patch( + graphApiClient, + { actorId: aiAssistantAccountId }, + { + entityTypeIds: [...entityTypeIds], + propertyPatches, + provenance, + }, + ); + + await createInferredEntityNotification({ + entity: updatedEntity, + graphApiClient, + operation: "update", + notifiedUserAccountId: userActorId, + }); + + const persistedDocumentEntity: PersistedEntity = { + entity: updatedEntity.toJSON(), + existingEntity, + operation: "update", + }; + + logProgress([ + { + persistedEntity: persistedDocumentEntity, + recordedAt: new Date().toISOString(), + stepId: Context.current().info.activityId, + type: "PersistedEntity", + }, + ]); + + const proposedEntities = + await generateDocumentProposedEntitiesAndCreateClaims({ + aiAssistantAccountId, + documentEntityId, + documentMetadata: { authors, publishedBy, publicationVenue }, + documentTitle: title, + provenance, + propertyProvenance, + }); + + return { + code: StatusCode.Ok, + contents: [ + { + outputs: [ + { + outputName: + "proposedEntities" satisfies OutputNameForAction<"inferMetadataFromDocument">, + payload: { + kind: "ProposedEntity", + value: proposedEntities, + }, + }, + { + outputName: + "updatedDocumentEntity" satisfies OutputNameForAction<"inferMetadataFromDocument">, + payload: { + kind: "PersistedEntity", + value: persistedDocumentEntity, + }, + }, + ], + }, + ], + }; +}; diff --git a/apps/hash-ai-worker-ts/src/activities/flow-activities/infer-metadata-from-document-action/generate-property-patches.ts b/apps/hash-ai-worker-ts/src/activities/flow-activities/infer-metadata-from-document-action/generate-property-patches.ts new file mode 100644 index 00000000000..47dd2dbe400 --- /dev/null +++ b/apps/hash-ai-worker-ts/src/activities/flow-activities/infer-metadata-from-document-action/generate-property-patches.ts @@ -0,0 +1,187 @@ +import type { PropertyProvenance } from "@local/hash-graph-client"; +import type { PropertyPatchOperation } from "@local/hash-graph-types/entity"; +import type { BaseUrl } from "@local/hash-graph-types/ontology"; +import type { + AcademicPaperProperties, + DOILinkPropertyValueWithMetadata, + DOIPropertyValueWithMetadata, + NumberOfPagesPropertyValueWithMetadata, + SummaryPropertyValueWithMetadata, +} from 
"@local/hash-isomorphic-utils/system-types/academicpaper"; +import type { + BookProperties, + ISBNPropertyValueWithMetadata, +} from "@local/hash-isomorphic-utils/system-types/book"; +import type { + DocProperties, + PublicationYearPropertyValueWithMetadata, + TitlePropertyValueWithMetadata, +} from "@local/hash-isomorphic-utils/system-types/shared"; + +import { logger } from "../../shared/activity-logger.js"; +import type { DocumentMetadata } from "./get-llm-analysis-of-doc.js"; + +export const generateDocumentPropertyPatches = ( + documentMetadata: Pick< + DocumentMetadata, + | "doi" + | "doiLink" + | "isbn" + | "publishedInYear" + | "summary" + | "title" + | "type" + > & { numberOfPages: number }, + provenance: PropertyProvenance, +): PropertyPatchOperation[] => { + const propertyPatches: PropertyPatchOperation[] = []; + + const { + doi, + doiLink, + isbn, + numberOfPages, + publishedInYear, + summary, + title, + type, + } = documentMetadata; + + const numPagesKey = + "https://hash.ai/@hash/types/property-type/number-of-pages/" satisfies keyof DocProperties; + + propertyPatches.push({ + op: "add", + path: [numPagesKey as BaseUrl], + property: { + value: numberOfPages, + metadata: { + dataTypeId: + "https://blockprotocol.org/@blockprotocol/types/data-type/number/v/1", + provenance, + }, + } satisfies NumberOfPagesPropertyValueWithMetadata, + }); + + const summaryKey = + "https://hash.ai/@hash/types/property-type/summary/" satisfies keyof DocProperties; + + propertyPatches.push({ + op: "add", + path: [summaryKey as BaseUrl], + property: { + value: summary, + metadata: { + dataTypeId: + "https://blockprotocol.org/@blockprotocol/types/data-type/text/v/1", + provenance, + }, + } satisfies SummaryPropertyValueWithMetadata, + }); + + if (title) { + const key = + "https://hash.ai/@hash/types/property-type/title/" satisfies keyof DocProperties; + + propertyPatches.push({ + op: "add", + path: [key as BaseUrl], + property: { + value: title, + metadata: { + dataTypeId: + "https://blockprotocol.org/@blockprotocol/types/data-type/text/v/1", + provenance, + }, + } satisfies TitlePropertyValueWithMetadata, + }); + } + + if (publishedInYear) { + const key = + "https://hash.ai/@hash/types/property-type/publication-year/" satisfies keyof DocProperties; + + propertyPatches.push({ + op: "add", + path: [key as BaseUrl], + property: { + value: publishedInYear, + metadata: { + dataTypeId: "https://hash.ai/@hash/types/data-type/year/v/1", + provenance, + }, + } satisfies PublicationYearPropertyValueWithMetadata, + }); + } + + if (doi) { + if (type !== "AcademicPaper") { + logger.warn( + `DOI of ${doi} was provided for non-academic paper type ${type}, ignoring.`, + ); + } else { + const key = + "https://hash.ai/@hash/types/property-type/doi/" satisfies keyof AcademicPaperProperties; + + propertyPatches.push({ + op: "add", + path: [key as BaseUrl], + property: { + value: doi, + metadata: { + dataTypeId: "https://hash.ai/@hash/types/data-type/doi/v/1", + provenance, + }, + } satisfies DOIPropertyValueWithMetadata, + }); + } + } + + if (doiLink) { + if (type !== "AcademicPaper") { + logger.warn( + `DOI Link of ${doiLink} was provided for non-academic paper type ${type}, ignoring.`, + ); + } else { + const key = + "https://hash.ai/@hash/types/property-type/doi-link/" satisfies keyof AcademicPaperProperties; + + propertyPatches.push({ + op: "add", + path: [key as BaseUrl], + property: { + value: doiLink, + metadata: { + dataTypeId: "https://hash.ai/@hash/types/data-type/uri/v/1", + provenance, + }, + } satisfies 
DOILinkPropertyValueWithMetadata, + }); + } + } + + if (isbn) { + if (type !== "Book") { + logger.warn( + `ISBN of ${isbn} was provided for non-book type ${type}, ignoring.`, + ); + } else { + const key = + "https://hash.ai/@hash/types/property-type/isbn/" satisfies keyof BookProperties; + + propertyPatches.push({ + op: "add", + path: [key as BaseUrl], + property: { + value: isbn, + metadata: { + dataTypeId: "https://hash.ai/@hash/types/data-type/isbn/v/1", + provenance, + }, + } satisfies ISBNPropertyValueWithMetadata, + }); + } + } + + return propertyPatches; +}; diff --git a/apps/hash-ai-worker-ts/src/activities/flow-activities/infer-metadata-from-document-action/generate-proposed-entities-and-claims.ts b/apps/hash-ai-worker-ts/src/activities/flow-activities/infer-metadata-from-document-action/generate-proposed-entities-and-claims.ts new file mode 100644 index 00000000000..7d2036938ff --- /dev/null +++ b/apps/hash-ai-worker-ts/src/activities/flow-activities/infer-metadata-from-document-action/generate-proposed-entities-and-claims.ts @@ -0,0 +1,328 @@ +import type { PropertyProvenance } from "@local/hash-graph-client"; +import type { EnforcedEntityEditionProvenance } from "@local/hash-graph-sdk/entity"; +import { Entity, LinkEntity } from "@local/hash-graph-sdk/entity"; +import type { AccountId } from "@local/hash-graph-types/account"; +import type { + EntityId, + EntityUuid, + PropertyMetadataObject, +} from "@local/hash-graph-types/entity"; +import type { OwnedById } from "@local/hash-graph-types/web"; +import type { ProposedEntity } from "@local/hash-isomorphic-utils/flows/types"; +import { generateUuid } from "@local/hash-isomorphic-utils/generate-uuid"; +import { createDefaultAuthorizationRelationships } from "@local/hash-isomorphic-utils/graph-queries"; +import { + blockProtocolPropertyTypes, + systemEntityTypes, + systemLinkEntityTypes, +} from "@local/hash-isomorphic-utils/ontology-type-ids"; +import type { + Claim as ClaimEntity, + HasObject, +} from "@local/hash-isomorphic-utils/system-types/claim"; +import type { + InstitutionProperties, + PersonProperties, + TextDataTypeMetadata, +} from "@local/hash-isomorphic-utils/system-types/shared"; +import { entityIdFromComponents } from "@local/hash-subgraph"; +import { Context } from "@temporalio/activity"; + +import { getFlowContext } from "../../shared/get-flow-context.js"; +import { graphApiClient } from "../../shared/graph-api-client.js"; +import { logProgress } from "../../shared/log-progress.js"; +import type { DocumentMetadata } from "./get-llm-analysis-of-doc.js"; + +const createClaim = async ({ + claimText, + creatorActorId, + draft, + objectText, + ownedById, + propertyProvenance, + provenance, + subjectText, + userActorId, +}: { + claimText: string; + creatorActorId: AccountId; + draft: boolean; + objectText: string; + ownedById: OwnedById; + propertyProvenance: PropertyProvenance; + provenance: EnforcedEntityEditionProvenance; + subjectText: string; + userActorId: AccountId; +}) => { + return await Entity.create( + graphApiClient, + { actorId: creatorActorId }, + { + draft, + entityUuid: generateUuid() as EntityUuid, + entityTypeIds: ["https://hash.ai/@hash/types/entity-type/claim/v/1"], + ownedById, + provenance, + relationships: createDefaultAuthorizationRelationships({ + actorId: userActorId, + }), + properties: { + value: { + "https://blockprotocol.org/@blockprotocol/types/property-type/textual-content/": + { + metadata: { + dataTypeId: + "https://blockprotocol.org/@blockprotocol/types/data-type/text/v/1", + 
provenance: propertyProvenance, + }, + value: claimText, + }, + "https://hash.ai/@hash/types/property-type/subject/": { + metadata: { + dataTypeId: + "https://blockprotocol.org/@blockprotocol/types/data-type/text/v/1", + provenance: propertyProvenance, + }, + value: subjectText, + }, + + "https://hash.ai/@hash/types/property-type/object/": { + metadata: { + dataTypeId: + "https://blockprotocol.org/@blockprotocol/types/data-type/text/v/1", + provenance: propertyProvenance, + }, + value: objectText, + }, + }, + }, + }, + ); +}; + +export const generateDocumentProposedEntitiesAndCreateClaims = async ({ + aiAssistantAccountId, + documentMetadata, + documentEntityId, + documentTitle, + provenance, + propertyProvenance, +}: { + aiAssistantAccountId: AccountId; + documentMetadata: Pick< + DocumentMetadata, + "authors" | "publicationVenue" | "publishedBy" + >; + documentEntityId: EntityId; + documentTitle: string; + provenance: EnforcedEntityEditionProvenance; + propertyProvenance: PropertyProvenance; +}): Promise<ProposedEntity[]> => { + const { + authors, + /** @todo H-3619: Infer info on publisher and venue, and link to docs */ + publicationVenue: _publicationVenue, + publishedBy: _publishedBy, + } = documentMetadata; + + const { createEntitiesAsDraft, webId, userAuthentication } = + await getFlowContext(); + + const textDataTypeMetadata: TextDataTypeMetadata = { + dataTypeId: + "https://blockprotocol.org/@blockprotocol/types/data-type/text/v/1", + provenance: propertyProvenance, + }; + + const nameOnlyPropertyMetadata: PropertyMetadataObject = { + value: { + [blockProtocolPropertyTypes.name.propertyTypeBaseUrl]: { + metadata: textDataTypeMetadata, + }, + }, + }; + + const proposedEntities: ProposedEntity[] = []; + + const institutionEntityIdByName: Record<string, EntityId> = {}; + + for (const author of authors ?? 
[]) { + const { name: authorName, affiliatedWith } = author; + + const entityUuid = generateUuid() as EntityUuid; + + const authorProperties: PersonProperties = { + "https://blockprotocol.org/@blockprotocol/types/property-type/name/": + authorName, + }; + + const authorEntityId = entityIdFromComponents(webId, entityUuid); + + /** + * Create a claim about the person having authored the document + */ + const authorToDocClaim = await createClaim({ + claimText: `${authorName} authored ${documentTitle}`, + creatorActorId: aiAssistantAccountId, + draft: createEntitiesAsDraft, + objectText: documentTitle, + ownedById: webId, + propertyProvenance, + provenance, + subjectText: authorName, + userActorId: userAuthentication.actorId, + }); + + /** + * Link the authorship claim to the document entity + * + * @todo H-3152 update persist-entity to handle updates to existing entities, and let claim link creation happen there + */ + await LinkEntity.create( + graphApiClient, + { actorId: aiAssistantAccountId }, + { + draft: createEntitiesAsDraft, + entityTypeIds: [systemLinkEntityTypes.hasObject.linkEntityTypeId], + ownedById: webId, + provenance, + linkData: { + leftEntityId: authorToDocClaim.entityId, + rightEntityId: documentEntityId, + }, + relationships: createDefaultAuthorizationRelationships({ + actorId: userAuthentication.actorId, + }), + properties: { value: {} }, + }, + ); + + const authorClaims: ProposedEntity["claims"] = { + isObjectOf: [], + isSubjectOf: [authorToDocClaim.entityId], + }; + + const authorProposedEntity: ProposedEntity = { + claims: authorClaims, + entityTypeIds: [systemEntityTypes.person.entityTypeId], + localEntityId: authorEntityId, + properties: authorProperties, + propertyMetadata: nameOnlyPropertyMetadata, + provenance, + }; + + proposedEntities.push(authorProposedEntity); + + /** + * Propose the link between the document and the author entity + */ + const emptyClaims: ProposedEntity["claims"] = { + isObjectOf: [], + isSubjectOf: [], + }; + proposedEntities.push({ + claims: emptyClaims, + entityTypeIds: [systemLinkEntityTypes.authoredBy.linkEntityTypeId], + localEntityId: entityIdFromComponents( + webId, + generateUuid() as EntityUuid, + ), + properties: {}, + propertyMetadata: { value: {} }, + provenance, + sourceEntityId: { + kind: "existing-entity", + entityId: documentEntityId, + }, + targetEntityId: { kind: "proposed-entity", localId: authorEntityId }, + }); + + for (const affiliateName of affiliatedWith ?? []) { + let institutionEntityId = institutionEntityIdByName[affiliateName]; + if (!institutionEntityId) { + institutionEntityId = entityIdFromComponents( + webId, + generateUuid() as EntityUuid, + ); + + const properties: InstitutionProperties = { + "https://blockprotocol.org/@blockprotocol/types/property-type/name/": + affiliateName, + }; + + /** + * Create a claim about the person being affiliated with the institution. 
+ * This will be linked to the institution entity in the persist-entity step + */ + const authorToInstitutionClaim = await createClaim({ + claimText: `${authorName} is affiliated with ${affiliateName}`, + creatorActorId: aiAssistantAccountId, + draft: createEntitiesAsDraft, + objectText: affiliateName, + ownedById: webId, + propertyProvenance, + provenance, + subjectText: authorName, + userActorId: userAuthentication.actorId, + }); + authorProposedEntity.claims.isSubjectOf.push( + authorToInstitutionClaim.entityId, + ); + + proposedEntities.push({ + claims: { + isSubjectOf: [], + isObjectOf: [authorToInstitutionClaim.entityId], + }, + entityTypeIds: [systemEntityTypes.institution.entityTypeId], + localEntityId: institutionEntityId, + properties, + propertyMetadata: nameOnlyPropertyMetadata, + provenance, + }); + + institutionEntityIdByName[affiliateName] = institutionEntityId; + } + + /** + * Create the link between the person and the institution entity + */ + proposedEntities.push({ + claims: emptyClaims, + entityTypeIds: [systemLinkEntityTypes.affiliatedWith.linkEntityTypeId], + localEntityId: entityIdFromComponents( + webId, + generateUuid() as EntityUuid, + ), + properties: {}, + propertyMetadata: { value: {} }, + provenance, + sourceEntityId: { kind: "proposed-entity", localId: authorEntityId }, + targetEntityId: { + kind: "proposed-entity", + localId: institutionEntityId, + }, + }); + } + } + + const workerId = generateUuid(); + + for (const proposedEntity of proposedEntities) { + logProgress([ + { + isUpdateToExistingProposal: false, + parentInstanceId: null, + proposedEntity, + recordedAt: new Date().toISOString(), + stepId: Context.current().info.activityId, + toolCallId: "generateDocumentProposedEntities", + type: "ProposedEntity", + workerInstanceId: workerId, + workerType: "Document analyzer", + }, + ]); + } + + return proposedEntities; +}; diff --git a/apps/hash-ai-worker-ts/src/activities/flow-activities/infer-metadata-from-document-action/get-llm-analysis-of-doc.ts b/apps/hash-ai-worker-ts/src/activities/flow-activities/infer-metadata-from-document-action/get-llm-analysis-of-doc.ts new file mode 100644 index 00000000000..8ab73bae0f7 --- /dev/null +++ b/apps/hash-ai-worker-ts/src/activities/flow-activities/infer-metadata-from-document-action/get-llm-analysis-of-doc.ts @@ -0,0 +1,220 @@ +import { Storage } from "@google-cloud/storage"; +import type { + GenerativeModel, + Part, + ResponseSchema, +} from "@google-cloud/vertexai"; +import { SchemaType, VertexAI } from "@google-cloud/vertexai"; + +import { logger } from "../../shared/activity-logger.js"; + +/** + * Ideally we'd use something like Zod and zod-to-json-schema to define the schema, to have the type automatically + * inferred, but the generated schema is not compatible with the Vertex AI schema, due to the latter's use of enums for + * the type field. 
+ * + * @todo use Zod and rewrite the bits of the schema that are incompatible with Vertex AI's schema type + */ +const documentMetadataSchema: ResponseSchema = { + type: SchemaType.OBJECT, + properties: { + authors: { + type: SchemaType.ARRAY, + items: { + type: SchemaType.OBJECT, + properties: { + affiliatedWith: { + type: SchemaType.ARRAY, + items: { + type: SchemaType.STRING, + }, + description: + "Any institution(s) or organization(s) that the document identifies the author as being affiliated with", + }, + name: { + description: "The name of the author", + type: SchemaType.STRING, + }, + }, + }, + }, + doi: { + description: "The DOI for this document, if provided", + type: SchemaType.STRING, + }, + doiLink: { + description: "The DOI link for this document, if provided", + type: SchemaType.STRING, + }, + isbn: { + description: + "The ISBN for this document, if it is a book and the ISBN is provided", + type: SchemaType.STRING, + }, + publishedBy: { + description: "The publisher of this document, if available.", + type: SchemaType.OBJECT, + properties: { + name: { type: SchemaType.STRING }, + }, + }, + publicationVenue: { + description: + "The venue in which this document/paper was published, if available", + type: SchemaType.OBJECT, + properties: { + title: { type: SchemaType.STRING }, + }, + }, + publishedInYear: { + description: + "The year (first year if a reprint) in which this document was published", + type: SchemaType.INTEGER, + }, + summary: { + description: "A one paragraph summary of this document", + type: SchemaType.STRING, + }, + title: { description: "The document's title", type: SchemaType.STRING }, + type: { + description: + "If the specific type of this document is known, it can be provided here. Valid options are 'Book' and 'AcademicPaper'", + format: "enum", + enum: ["AcademicPaper", "Book"], + type: SchemaType.STRING, + }, + }, + required: ["summary", "title"], +}; + +export type DocumentMetadata = { + authors?: { name: string; affiliatedWith?: string[] }[]; + doi?: string; + doiLink?: string; + isbn?: string; + publishedBy?: { + name: string; + }; + publicationVenue?: { + title: string; + }; + publishedInYear?: number; + summary: string; + title: string; + type?: "AcademicPaper" | "Book"; +}; + +const googleCloudProjectId = process.env.GOOGLE_CLOUD_HASH_PROJECT_ID; + +let _generativeModel: GenerativeModel | undefined; + +const getGeminiModel = () => { + if (!googleCloudProjectId) { + throw new Error( + "GOOGLE_CLOUD_HASH_PROJECT_ID environment variable is not set", + ); + } + + if (_generativeModel) { + return _generativeModel; + } + + const vertexAI = new VertexAI({ + project: googleCloudProjectId, + location: "us-east4", + }); + + const generativeModel = vertexAI.getGenerativeModel({ + model: "gemini-1.5-pro", + generationConfig: { + responseMimeType: "application/json", + responseSchema: documentMetadataSchema, + }, + }); + + _generativeModel = generativeModel; + + return generativeModel; +}; + +let _googleCloudStorage: Storage | undefined; + +const storageBucket = process.env.GOOGLE_CLOUD_STORAGE_BUCKET; + +const getGoogleCloudStorage = () => { + if (_googleCloudStorage) { + return _googleCloudStorage; + } + + const storage = new Storage(); + _googleCloudStorage = storage; + + return storage; +}; + +export const getLlmAnalysisOfDoc = async ({ + hashFileStorageKey, + fileSystemPath, +}: { + hashFileStorageKey: string; + fileSystemPath: string; +}): Promise<DocumentMetadata> => { + if (!storageBucket) { + throw new Error( + "GOOGLE_CLOUD_STORAGE_BUCKET environment variable is not 
set", + ); + } + + const gemini = getGeminiModel(); + + const storage = getGoogleCloudStorage(); + + const cloudStorageFilePath = `gs://${storageBucket}/${hashFileStorageKey}`; + + try { + await storage.bucket(storageBucket).file(hashFileStorageKey).getMetadata(); + + logger.info( + `Already exists in Google Cloud Storage: HASH key ${hashFileStorageKey} in ${storageBucket} bucket`, + ); + } catch (err) { + if ("code" in (err as Error) && (err as { code: unknown }).code === 404) { + await storage + .bucket(storageBucket) + .upload(fileSystemPath, { destination: hashFileStorageKey }); + logger.info( + `Uploaded to Google Cloud Storage: HASH key ${hashFileStorageKey} in ${storageBucket} bucket`, + ); + } else { + throw err; + } + } + + const filePart: Part = { + fileData: { + fileUri: cloudStorageFilePath, + mimeType: "application/pdf", + }, + }; + + const textPart = { + text: `Please provide a summary of this document, and any of the requested metadata you can infer from it. + If you're not confident about any of the metadata fields, omit them.`, + }; + + const request = { + contents: [{ role: "user", parts: [filePart, textPart] }], + }; + + const resp = await gemini.generateContent(request); + + const contentResponse = resp.response.candidates?.[0]?.content.parts[0]?.text; + + if (!contentResponse) { + throw new Error("No content response from LLM analysis"); + } + + const parsedResponse = JSON.parse(contentResponse) as DocumentMetadata; + + return parsedResponse; +}; diff --git a/apps/hash-ai-worker-ts/src/activities/flow-activities/persist-entity-action.ts b/apps/hash-ai-worker-ts/src/activities/flow-activities/persist-entity-action.ts index 08df99a0ce7..389f2d2b609 100644 --- a/apps/hash-ai-worker-ts/src/activities/flow-activities/persist-entity-action.ts +++ b/apps/hash-ai-worker-ts/src/activities/flow-activities/persist-entity-action.ts @@ -34,7 +34,7 @@ import { createInferredEntityNotification } from "../shared/create-inferred-enti import { getFlowContext } from "../shared/get-flow-context.js"; import { graphApiClient } from "../shared/graph-api-client.js"; import { logProgress } from "../shared/log-progress.js"; -import { getFileEntityFromUrl } from "./shared/get-file-entity-from-url.js"; +import { createFileEntityFromUrl } from "./shared/create-file-entity-from-url.js"; import { getEntityUpdate, getLatestEntityById, @@ -44,7 +44,7 @@ import type { FlowActionActivity } from "./types.js"; export const fileEntityTypeIds: VersionedUrl[] = [ systemEntityTypes.file.entityTypeId, systemEntityTypes.image.entityTypeId, - systemEntityTypes.document.entityTypeId, + systemEntityTypes.documentFile.entityTypeId, systemEntityTypes.pdfDocument.entityTypeId, systemEntityTypes.docxDocument.entityTypeId, systemEntityTypes.spreadsheetFile.entityTypeId, @@ -131,7 +131,7 @@ export const persistEntityAction: FlowActionActivity = async ({ inputs }) => { if (isFileEntity && fileUrl) { operation = "create"; - const getFileEntityFromUrlStatus = await getFileEntityFromUrl({ + const createFileEntityFromUrlStatus = await createFileEntityFromUrl({ entityUuid, url: fileUrl, propertyMetadata, @@ -139,10 +139,10 @@ export const persistEntityAction: FlowActionActivity = async ({ inputs }) => { entityTypeIds, }); - if (getFileEntityFromUrlStatus.status !== "ok") { + if (createFileEntityFromUrlStatus.status !== "ok") { return { code: StatusCode.Internal, - message: getFileEntityFromUrlStatus.message, + message: createFileEntityFromUrlStatus.message, contents: [ { outputs: [ @@ -160,7 +160,7 @@ export const 
persistEntityAction: FlowActionActivity = async ({ inputs }) => { }; } - const { entity: updatedEntity } = getFileEntityFromUrlStatus; + const { entity: updatedEntity } = createFileEntityFromUrlStatus; entity = updatedEntity; } else { diff --git a/apps/hash-ai-worker-ts/src/activities/flow-activities/shared/get-file-entity-from-url.ts b/apps/hash-ai-worker-ts/src/activities/flow-activities/shared/create-file-entity-from-url.ts similarity index 99% rename from apps/hash-ai-worker-ts/src/activities/flow-activities/shared/get-file-entity-from-url.ts rename to apps/hash-ai-worker-ts/src/activities/flow-activities/shared/create-file-entity-from-url.ts index dc5dd125292..ca99565ee2a 100644 --- a/apps/hash-ai-worker-ts/src/activities/flow-activities/shared/get-file-entity-from-url.ts +++ b/apps/hash-ai-worker-ts/src/activities/flow-activities/shared/create-file-entity-from-url.ts @@ -127,7 +127,7 @@ const writeFileToS3URL = async ({ }); }; -export const getFileEntityFromUrl = async (params: { +export const createFileEntityFromUrl = async (params: { entityUuid: EntityUuid | null; url: string; propertyMetadata?: PropertyMetadataObject; diff --git a/apps/hash-ai-worker-ts/src/workflows.ts b/apps/hash-ai-worker-ts/src/workflows.ts index bba0fd24829..c81f093283d 100644 --- a/apps/hash-ai-worker-ts/src/workflows.ts +++ b/apps/hash-ai-worker-ts/src/workflows.ts @@ -15,7 +15,6 @@ import type { CreateEmbeddingsParams, CreateEmbeddingsReturn, } from "@local/hash-isomorphic-utils/ai-inference-types"; -import { generateVersionedUrlMatchingFilter } from "@local/hash-isomorphic-utils/graph-queries"; import { systemEntityTypes } from "@local/hash-isomorphic-utils/ontology-type-ids"; import type { ParseTextFromFileParams } from "@local/hash-isomorphic-utils/parse-text-from-file-types"; import { @@ -405,11 +404,9 @@ export const updateEntityEmbeddings = async ( authentication: params.authentication, request: { filter: { - any: entity.metadata.entityTypeIds.map((entityTypeId) => - generateVersionedUrlMatchingFilter(entityTypeId, { - ignoreParents: true, - }), - ), + any: entity.metadata.entityTypeIds.map((entityTypeId) => ({ + equal: [{ path: ["versionedUrl"] }, { parameter: entityTypeId }], + })), }, graphResolveDepths: { inheritsFrom: { outgoing: 255 }, diff --git a/apps/hash-api/src/graph/ensure-system-graph-is-initialized/migrate-ontology-types/migrations/019-add-doc-company-and-person-types.migration.ts b/apps/hash-api/src/graph/ensure-system-graph-is-initialized/migrate-ontology-types/migrations/019-add-doc-company-and-person-types.migration.ts new file mode 100644 index 00000000000..c312eebab0a --- /dev/null +++ b/apps/hash-api/src/graph/ensure-system-graph-is-initialized/migrate-ontology-types/migrations/019-add-doc-company-and-person-types.migration.ts @@ -0,0 +1,541 @@ +import { + blockProtocolDataTypes, + blockProtocolPropertyTypes, + systemDataTypes, + systemPropertyTypes, +} from "@local/hash-isomorphic-utils/ontology-type-ids"; +import { linkEntityTypeUrl } from "@local/hash-subgraph"; + +import type { MigrationFunction } from "../types"; +import { + anyUserInstantiator, + createSystemDataTypeIfNotExists, + createSystemEntityTypeIfNotExists, + createSystemPropertyTypeIfNotExists, +} from "../util"; + +const migrate: MigrationFunction = async ({ + context, + authentication, + migrationState, +}) => { + const doiDataType = await createSystemDataTypeIfNotExists( + context, + authentication, + { + dataTypeDefinition: { + allOf: [{ $ref: blockProtocolDataTypes.text.dataTypeId }], + title: "DOI", + 
titlePlural: "DOIs", + description: + "A DOI (Digital Object Identifier), used to identify digital objects such as journal articles or datasets.", + type: "string", + }, + conversions: {}, + migrationState, + webShortname: "hash", + }, + ); + + const doiPropertyType = await createSystemPropertyTypeIfNotExists( + context, + authentication, + { + propertyTypeDefinition: { + title: "DOI", + description: "The Digital Object Identifier (DOI) of an object", + possibleValues: [{ dataTypeId: doiDataType.schema.$id }], + }, + migrationState, + webShortname: "hash", + }, + ); + + const doiLinkPropertyType = await createSystemPropertyTypeIfNotExists( + context, + authentication, + { + propertyTypeDefinition: { + title: "DOI Link", + description: + "A permanent link for a digital object, using its Digital Object Identifier (DOI), which resolves to a webpage describing it", + possibleValues: [{ dataTypeId: systemDataTypes.uri.dataTypeId }], + }, + migrationState, + webShortname: "hash", + }, + ); + + const isbnDataType = await createSystemDataTypeIfNotExists( + context, + authentication, + { + dataTypeDefinition: { + allOf: [{ $ref: blockProtocolDataTypes.text.dataTypeId }], + title: "ISBN", + titlePlural: "ISBNs", + description: + "International Standard Book Number: a numeric commercial book identifier that is intended to be unique, issued by an affiliate of the International ISBN Agency.", + type: "string", + }, + conversions: {}, + migrationState, + webShortname: "hash", + }, + ); + + const isbnPropertyType = await createSystemPropertyTypeIfNotExists( + context, + authentication, + { + propertyTypeDefinition: { + title: "ISBN", + description: "The International Standard Book Number (ISBN) of a book", + possibleValues: [{ dataTypeId: isbnDataType.schema.$id }], + }, + migrationState, + webShortname: "hash", + }, + ); + + const summaryPropertyType = await createSystemPropertyTypeIfNotExists( + context, + authentication, + { + propertyTypeDefinition: { + title: "Summary", + description: "An overview or synopsis of something.", + possibleValues: [{ primitiveDataType: "text" }], + }, + migrationState, + webShortname: "hash", + }, + ); + + const integerDataType = await createSystemDataTypeIfNotExists( + context, + authentication, + { + dataTypeDefinition: { + allOf: [{ $ref: blockProtocolDataTypes.number.dataTypeId }], + title: "Integer", + description: + "The number zero (0), a positive natural number (e.g. 1, 2, 3), or the negation of a positive natural number (e.g. 
-1, -2, -3).", + multipleOf: 1, + type: "number", + }, + conversions: {}, + migrationState, + webShortname: "hash", + }, + ); + + const yearDataType = await createSystemDataTypeIfNotExists( + context, + authentication, + { + dataTypeDefinition: { + allOf: [{ $ref: integerDataType.schema.$id }], + title: "Year", + description: "A year in the Gregorian calendar.", + type: "number", + }, + conversions: {}, + migrationState, + webShortname: "hash", + }, + ); + + const publicationYear = await createSystemPropertyTypeIfNotExists( + context, + authentication, + { + propertyTypeDefinition: { + title: "Publication Year", + description: "The year in which something was first published.", + possibleValues: [{ dataTypeId: yearDataType.schema.$id }], + }, + migrationState, + webShortname: "hash", + }, + ); + + const methodologyPropertyType = await createSystemPropertyTypeIfNotExists( + context, + authentication, + { + propertyTypeDefinition: { + title: "Methodology", + description: + "The procedure via which something was produced, analyzed, or otherwise approached.", + possibleValues: [{ primitiveDataType: "text" }], + }, + migrationState, + webShortname: "hash", + }, + ); + + const experimentalSubjectPropertyType = + await createSystemPropertyTypeIfNotExists(context, authentication, { + propertyTypeDefinition: { + title: "Experimental Subject", + description: + "The type of participant or observed entity in an experiment or study.", + possibleValues: [{ primitiveDataType: "text" }], + }, + migrationState, + webShortname: "hash", + }); + + const findingPropertyType = await createSystemPropertyTypeIfNotExists( + context, + authentication, + { + propertyTypeDefinition: { + title: "Finding", + description: + "The results or conclusion of an experiment, research project, investigation, etc.", + possibleValues: [{ primitiveDataType: "text" }], + }, + migrationState, + webShortname: "hash", + }, + ); + + const authoredByLinkEntityType = await createSystemEntityTypeIfNotExists( + context, + authentication, + { + entityTypeDefinition: { + allOf: [linkEntityTypeUrl], + icon: "🖊", + title: "Authored By", + titlePlural: "Authored Bys", + inverse: { + title: "Author Of", + }, + description: "Who or what something was authored by", + }, + migrationState, + instantiator: anyUserInstantiator, + webShortname: "hash", + }, + ); + + /** @todo H-3619: Infer info on publisher and link to docs */ + // const _publishedByLinkEntityType = await createSystemEntityTypeIfNotExists( + // context, + // authentication, + // { + // entityTypeDefinition: { + // allOf: [linkEntityTypeUrl], + // title: "Published By", + // titlePlural: "Published Bys", + // inverse: { + // title: "Published", + // }, + // description: "The entity that published something", + // }, + // migrationState, + // instantiator: anyUserInstantiator, + // webShortname: "hash", + // }, + // ); + + /** @todo H-3619: Infer info on publisher and link to docs */ + // const publishedInLinkEntityType = await createSystemEntityTypeIfNotExists( + // context, + // authentication, + // { + // entityTypeDefinition: { + // allOf: [linkEntityTypeUrl], + // title: "Published In", + // titlePlural: "Published Ins", + // inverse: { + // title: "Published", + // }, + // description: "The place in which something was published", + // }, + // migrationState, + // instantiator: anyUserInstantiator, + // webShortname: "hash", + // }, + // ); + + const affiliatedWith = await createSystemEntityTypeIfNotExists( + context, + authentication, + { + entityTypeDefinition: { + allOf: 
[linkEntityTypeUrl], + title: "Affiliated With", + titlePlural: "Affiliated Withs", + inverse: { + title: "Affiliated Width", + }, + description: "Something that something is affiliated with.", + }, + migrationState, + instantiator: anyUserInstantiator, + webShortname: "hash", + }, + ); + + const institutionEntityType = await createSystemEntityTypeIfNotExists( + context, + authentication, + { + entityTypeDefinition: { + title: "Institution", + icon: "🏛", + description: + "An organization dedicated to a specific purpose, such as education, research, or public service, and structured with formal systems of governance and operation.", + properties: [ + { + propertyType: blockProtocolPropertyTypes.name.propertyTypeId, + required: true, + }, + { + propertyType: blockProtocolPropertyTypes.description.propertyTypeId, + }, + ], + }, + instantiator: anyUserInstantiator, + migrationState, + webShortname: "hash", + }, + ); + + /** @todo H-3619: Infer info on publication venue and link to docs */ + // const archiveEntityType = await createSystemEntityTypeIfNotExists( + // context, + // authentication, + // { + // entityTypeDefinition: { + // title: "Archive", + // description: "A collection of documents, records or other artifacts.", + // properties: [ + // { + // propertyType: blockProtocolPropertyTypes.name.propertyTypeId, + // required: true, + // }, + // { + // propertyType: blockProtocolPropertyTypes.description.propertyTypeId, + // }, + // ], + // }, + // instantiator: anyUserInstantiator, + // migrationState, + // webShortname: "hash", + // }, + // ); + + /** @todo H-3619: Infer info on publisher and link to docs */ + // const journalEntityType = await createSystemEntityTypeIfNotExists( + // context, + // authentication, + // { + // entityTypeDefinition: { + // title: "Journal", + // description: + // "A periodical publication containing articles and other content related to a particular subject or profession.", + // properties: [ + // { + // propertyType: blockProtocolPropertyTypes.name.propertyTypeId, + // required: true, + // }, + // { + // propertyType: blockProtocolPropertyTypes.description.propertyTypeId, + // }, + // ], + // }, + // instantiator: anyUserInstantiator, + // migrationState, + // webShortname: "hash", + // }, + // ); + + /** @todo H-3619: Infer info on publisher and link to docs */ + // const _universityEntityType = await createSystemEntityTypeIfNotExists( + // context, + // authentication, + // { + // entityTypeDefinition: { + // allOf: [institutionEntityType.schema.$id], + // title: "University", + // description: + // "An institution of higher education and research, typically offering undergraduate and postgraduate degrees across a wide range of disciplines, and often engaging in the creation and dissemination of knowledge.", + // }, + // instantiator: anyUserInstantiator, + // migrationState, + // webShortname: "hash", + // }, + // ); + + const personEntityType = await createSystemEntityTypeIfNotExists( + context, + authentication, + { + entityTypeDefinition: { + title: "Person", + icon: "👤", + /** @todo improve this desc */ + description: "A human being", + labelProperty: blockProtocolPropertyTypes.name.propertyTypeBaseUrl, + properties: [ + { + propertyType: blockProtocolPropertyTypes.name.propertyTypeId, + required: true, + }, + { + propertyType: blockProtocolPropertyTypes.description.propertyTypeId, + }, + { + propertyType: systemPropertyTypes.email.propertyTypeId, + array: true, + }, + ], + outgoingLinks: [ + { + destinationEntityTypes: 
[institutionEntityType.schema.$id], + linkEntityType: affiliatedWith.schema.$id, + }, + ], + }, + instantiator: anyUserInstantiator, + migrationState, + webShortname: "hash", + }, + ); + + const numberOfPagesPropertyType = await createSystemPropertyTypeIfNotExists( + context, + authentication, + { + propertyTypeDefinition: { + title: "Number of Pages", + description: "The total number of pages something has.", + possibleValues: [{ primitiveDataType: "number" }], + }, + migrationState, + webShortname: "hash", + }, + ); + + const docEntityType = await createSystemEntityTypeIfNotExists( + context, + authentication, + { + entityTypeDefinition: { + title: "Doc", + description: "A written work, such as a book or article.", + icon: "📝", + labelProperty: systemPropertyTypes.title.propertyTypeBaseUrl, + properties: [ + { + propertyType: systemPropertyTypes.title.propertyTypeId, + required: true, + }, + { + propertyType: summaryPropertyType.schema.$id, + }, + { + propertyType: numberOfPagesPropertyType.schema.$id, + }, + { + propertyType: publicationYear.schema.$id, + }, + ], + outgoingLinks: [ + { + destinationEntityTypes: [personEntityType.schema.$id], + linkEntityType: authoredByLinkEntityType.schema.$id, + }, + ], + }, + instantiator: anyUserInstantiator, + migrationState, + webShortname: "hash", + }, + ); + + const _bookEntityType = await createSystemEntityTypeIfNotExists( + context, + authentication, + { + entityTypeDefinition: { + allOf: [docEntityType.schema.$id], + title: "Book", + description: + "A written work, typically longer than an article, often published in print form.", + properties: [ + { + propertyType: isbnPropertyType.schema.$id, + }, + ], + }, + instantiator: anyUserInstantiator, + migrationState, + webShortname: "hash", + }, + ); + + const _academicPaperEntityType = await createSystemEntityTypeIfNotExists( + context, + authentication, + { + entityTypeDefinition: { + allOf: [docEntityType.schema.$id], + title: "Academic Paper", + description: "A paper describing academic research", + properties: [ + { + propertyType: systemPropertyTypes.title.propertyTypeId, + required: true, + }, + { + propertyType: doiPropertyType.schema.$id, + required: false, + }, + { + propertyType: doiLinkPropertyType.schema.$id, + required: false, + }, + { + propertyType: summaryPropertyType.schema.$id, + required: true, + }, + { + propertyType: methodologyPropertyType.schema.$id, + required: false, + }, + { + propertyType: experimentalSubjectPropertyType.schema.$id, + required: false, + }, + { + propertyType: findingPropertyType.schema.$id, + required: false, + }, + ], + outgoingLinks: [ + /** @todo H-3619: Infer info on publisher and link to docs */ + // { + // destinationEntityTypes: [ + // archiveEntityType.schema.$id, + // journalEntityType.schema.$id, + // ], + // linkEntityType: publishedInLinkEntityType.schema.$id, + // }, + ], + }, + instantiator: anyUserInstantiator, + migrationState, + webShortname: "hash", + }, + ); + + return migrationState; +}; + +export default migrate; diff --git a/apps/hash-api/src/graph/ensure-system-graph-is-initialized/migrate-ontology-types/util.ts b/apps/hash-api/src/graph/ensure-system-graph-is-initialized/migrate-ontology-types/util.ts index 2f7d6804a91..4cf0cb634bc 100644 --- a/apps/hash-api/src/graph/ensure-system-graph-is-initialized/migrate-ontology-types/util.ts +++ b/apps/hash-api/src/graph/ensure-system-graph-is-initialized/migrate-ontology-types/util.ts @@ -411,6 +411,8 @@ export type EntityTypeDefinition = { allOf?: VersionedUrl[]; entityTypeId: 
VersionedUrl; title: string; + titlePlural?: string; + inverse?: EntityType["inverse"]; description: string; labelProperty?: BaseUrl; icon?: string; diff --git a/apps/hash-api/src/graphql/resolvers/knowledge/file/create-file-from-url.ts b/apps/hash-api/src/graphql/resolvers/knowledge/file/create-file-from-url.ts index 8d7449c1b27..0f8c0e78b10 100644 --- a/apps/hash-api/src/graphql/resolvers/knowledge/file/create-file-from-url.ts +++ b/apps/hash-api/src/graphql/resolvers/knowledge/file/create-file-from-url.ts @@ -1,5 +1,6 @@ import type { Entity } from "@local/hash-graph-sdk/entity"; import type { File as FileEntity } from "@local/hash-isomorphic-utils/system-types/shared"; +import { extractOwnedByIdFromEntityId } from "@local/hash-subgraph"; import { createFileFromExternalUrl } from "../../../../graph/knowledge/system-types/file"; import type { @@ -8,6 +9,7 @@ import type { } from "../../../api-types.gen"; import type { LoggedInGraphQLContext } from "../../../context"; import { graphQLContextToImpureGraphContext } from "../../util"; +import { triggerPdfAnalysisWorkflow } from "./shared"; export const createFileFromUrl: ResolverFn< Promise>, @@ -25,7 +27,7 @@ export const createFileFromUrl: ResolverFn< }, graphQLContext, ) => { - const { authentication } = graphQLContext; + const { authentication, temporal } = graphQLContext; const context = graphQLContextToImpureGraphContext(graphQLContext); const entity = await createFileFromExternalUrl(context, authentication, { @@ -36,5 +38,12 @@ export const createFileFromUrl: ResolverFn< url, }); + await triggerPdfAnalysisWorkflow({ + entity, + temporalClient: temporal, + userAccountId: authentication.actorId, + webId: extractOwnedByIdFromEntityId(entity.entityId), + }); + return entity; }; diff --git a/apps/hash-api/src/graphql/resolvers/knowledge/file/request-file-upload.ts b/apps/hash-api/src/graphql/resolvers/knowledge/file/request-file-upload.ts index 5477cf2c713..ec02fcf7125 100644 --- a/apps/hash-api/src/graphql/resolvers/knowledge/file/request-file-upload.ts +++ b/apps/hash-api/src/graphql/resolvers/knowledge/file/request-file-upload.ts @@ -1,3 +1,4 @@ +import { extractOwnedByIdFromEntityId } from "@local/hash-subgraph"; import { UserInputError } from "apollo-server-errors"; import { createFileFromUploadRequest } from "../../../../graph/knowledge/system-types/file"; @@ -8,6 +9,7 @@ import type { } from "../../../api-types.gen"; import type { LoggedInGraphQLContext } from "../../../context"; import { graphQLContextToImpureGraphContext } from "../../util"; +import { triggerPdfAnalysisWorkflow } from "./shared"; /** * We want to limit the size of files that can be uploaded to account @@ -37,7 +39,7 @@ export const requestFileUpload: ResolverFn< }, graphQLContext, ) => { - const { authentication } = graphQLContext; + const { authentication, temporal } = graphQLContext; const context = graphQLContextToImpureGraphContext(graphQLContext); if (size > maximumFileSizeInBytes) { @@ -59,6 +61,13 @@ export const requestFileUpload: ResolverFn< }, ); + await triggerPdfAnalysisWorkflow({ + entity, + temporalClient: temporal, + userAccountId: authentication.actorId, + webId: extractOwnedByIdFromEntityId(entity.entityId), + }); + return { presignedPut, entity: entity.toJSON(), diff --git a/apps/hash-api/src/graphql/resolvers/knowledge/file/shared.ts b/apps/hash-api/src/graphql/resolvers/knowledge/file/shared.ts new file mode 100644 index 00000000000..42ac8247c03 --- /dev/null +++ b/apps/hash-api/src/graphql/resolvers/knowledge/file/shared.ts @@ -0,0 +1,81 @@ 
+import type { TemporalClient } from "@local/hash-backend-utils/temporal"; +import type { Entity } from "@local/hash-graph-sdk/entity"; +import type { AccountId } from "@local/hash-graph-types/account"; +import type { OwnedById } from "@local/hash-graph-types/web"; +import { inferMetadataFromDocumentFlowDefinition } from "@local/hash-isomorphic-utils/flows/file-flow-definitions"; +import type { + RunFlowWorkflowParams, + RunFlowWorkflowResponse, +} from "@local/hash-isomorphic-utils/flows/temporal-types"; +import { generateUuid } from "@local/hash-isomorphic-utils/generate-uuid"; +import type { File } from "@local/hash-isomorphic-utils/system-types/shared"; + +export const triggerPdfAnalysisWorkflow = async ({ + entity, + temporalClient, + userAccountId, + webId, +}: { + entity: Entity; + temporalClient: TemporalClient; + userAccountId: AccountId; + webId: OwnedById; +}) => { + const { entityId, properties } = entity; + + const mimeType = + properties[ + "https://blockprotocol.org/@blockprotocol/types/property-type/mime-type/" + ]; + + if (mimeType !== "application/pdf") { + return; + } + + const params: RunFlowWorkflowParams = { + dataSources: { + files: { fileEntityIds: [] }, + internetAccess: { + enabled: false, + browserPlugin: { + enabled: false, + domains: [], + }, + }, + }, + flowDefinition: inferMetadataFromDocumentFlowDefinition, + flowTrigger: { + triggerDefinitionId: "onFileUpload", + outputs: [ + { + outputName: "fileEntityId", + payload: { + kind: "EntityId", + value: entityId, + }, + }, + ], + }, + userAuthentication: { actorId: userAccountId }, + webId, + }; + + const workflowId = generateUuid(); + + await temporalClient.workflow.start< + (params: RunFlowWorkflowParams) => Promise + >("runFlow", { + taskQueue: "ai", + args: [params], + memo: { + flowDefinitionId: + inferMetadataFromDocumentFlowDefinition.flowDefinitionId, + userAccountId, + webId, + }, + retry: { + maximumAttempts: 1, + }, + workflowId, + }); +}; diff --git a/apps/hash-api/src/seed-data/seed-flow-test-types.ts b/apps/hash-api/src/seed-data/seed-flow-test-types.ts index 46e7b9d6adf..39255ac2606 100644 --- a/apps/hash-api/src/seed-data/seed-flow-test-types.ts +++ b/apps/hash-api/src/seed-data/seed-flow-test-types.ts @@ -614,6 +614,7 @@ const seedFlowTestTypes = async () => { { propertyType: systemPropertyTypes.email.propertyTypeId, required: false, + array: true, }, { propertyType: linkedinUrlPropertyType.schema.$id, diff --git a/apps/hash-frontend/.eslintrc.cjs b/apps/hash-frontend/.eslintrc.cjs index 9e9a076e450..8e62f091c00 100644 --- a/apps/hash-frontend/.eslintrc.cjs +++ b/apps/hash-frontend/.eslintrc.cjs @@ -65,6 +65,7 @@ module.exports = { "@hashintel/design-system/*", "!@hashintel/design-system/theme", "!@hashintel/design-system/constants", + "!@hashintel/design-system/palettes", ], message: "Please import from @hashintel/design-system instead", }, diff --git a/apps/hash-frontend/src/components/hooks/block-protocol-functions/knowledge/use-block-protocol-create-entity.ts b/apps/hash-frontend/src/components/hooks/block-protocol-functions/knowledge/use-block-protocol-create-entity.ts index 2131d1a48f9..cdfbabcbb30 100644 --- a/apps/hash-frontend/src/components/hooks/block-protocol-functions/knowledge/use-block-protocol-create-entity.ts +++ b/apps/hash-frontend/src/components/hooks/block-protocol-functions/knowledge/use-block-protocol-create-entity.ts @@ -12,7 +12,7 @@ import type { } from "../../../../graphql/api-types.gen"; import { createEntityMutation, - queryEntitiesQuery, + getEntitySubgraphQuery, } 
from "../../../../graphql/queries/knowledge/entity.queries"; import { useActiveWorkspace } from "../../../../pages/shared/workspace-context"; import { generateUseEntityTypeEntitiesQueryVariables } from "../../../../shared/use-entity-type-entities"; @@ -39,7 +39,7 @@ export const useBlockProtocolCreateEntity = ( * a type is created by a user that is from a different web. */ { - query: queryEntitiesQuery, + query: getEntitySubgraphQuery, variables: generateUseEntityTypeEntitiesQueryVariables({ ownedById: activeWorkspaceOwnedById, }), diff --git a/apps/hash-frontend/src/pages/[shortname]/entities/[entity-uuid].page/entity-editor/links-section/outgoing-links-section/cells/linked-with-cell.ts b/apps/hash-frontend/src/pages/[shortname]/entities/[entity-uuid].page/entity-editor/links-section/outgoing-links-section/cells/linked-with-cell.ts index 525aff0fb64..8c622a75493 100644 --- a/apps/hash-frontend/src/pages/[shortname]/entities/[entity-uuid].page/entity-editor/links-section/outgoing-links-section/cells/linked-with-cell.ts +++ b/apps/hash-frontend/src/pages/[shortname]/entities/[entity-uuid].page/entity-editor/links-section/outgoing-links-section/cells/linked-with-cell.ts @@ -37,7 +37,6 @@ export const renderLinkedWithCell: CustomRenderer = { const { linkAndTargetEntities, markLinkAsArchived, - onEntityClick, isFile, isList, isUploading, @@ -152,21 +151,6 @@ export const renderLinkedWithCell: CustomRenderer = { left: accumulatedLeft, }); - entityChipInteractables.push( - InteractableManager.createCellInteractable(args, { - id: rightEntity.metadata.recordId.entityId, - pos: { - left: accumulatedLeft, - right: accumulatedLeft + chipWidth, - top: yCenter - 16, - bottom: yCenter + 16, - }, - onClick: () => { - onEntityClick(rightEntity.entityId); - }, - }), - ); - accumulatedLeft += chipWidth + chipGap; } diff --git a/apps/hash-frontend/src/pages/[shortname]/entities/[entity-uuid].page/entity-editor/links-section/outgoing-links-section/cells/linked-with-cell/linked-entity-list-editor.tsx b/apps/hash-frontend/src/pages/[shortname]/entities/[entity-uuid].page/entity-editor/links-section/outgoing-links-section/cells/linked-with-cell/linked-entity-list-editor.tsx index 01534eaa720..3985d228e46 100644 --- a/apps/hash-frontend/src/pages/[shortname]/entities/[entity-uuid].page/entity-editor/links-section/outgoing-links-section/cells/linked-with-cell/linked-entity-list-editor.tsx +++ b/apps/hash-frontend/src/pages/[shortname]/entities/[entity-uuid].page/entity-editor/links-section/outgoing-links-section/cells/linked-with-cell/linked-entity-list-editor.tsx @@ -168,6 +168,7 @@ export const LinkedEntityListEditor: ProvideEditorComponent = ( return ( void; entityId: EntityId; title: string; imageSrc?: string; @@ -40,7 +42,10 @@ export const LinkedEntityListRow = ({ > onEntityClick(entityId)} + onClick={() => { + closeEditor(); + onEntityClick(entityId); + }} sx={{ background: "none", border: "none", diff --git a/apps/hash-frontend/src/pages/[shortname]/entities/[entity-uuid].page/entity-editor/links-section/shared/table-styling.ts b/apps/hash-frontend/src/pages/[shortname]/entities/[entity-uuid].page/entity-editor/links-section/shared/table-styling.ts index 7ae02bb1ee5..f88b1990b30 100644 --- a/apps/hash-frontend/src/pages/[shortname]/entities/[entity-uuid].page/entity-editor/links-section/shared/table-styling.ts +++ b/apps/hash-frontend/src/pages/[shortname]/entities/[entity-uuid].page/entity-editor/links-section/shared/table-styling.ts @@ -11,6 +11,7 @@ export const linksTableCellSx: SxProps = { 
borderBottom: "none", color: ({ palette }) => palette.gray[80], height: linksTableRowHeight, + overflowX: "hidden", }; export const linksTableFontSize = 14; diff --git a/apps/hash-frontend/src/pages/[shortname]/shared/flow-visualizer/activity-log.tsx b/apps/hash-frontend/src/pages/[shortname]/shared/flow-visualizer/activity-log.tsx index 460503b872d..7d9f2f3673d 100644 --- a/apps/hash-frontend/src/pages/[shortname]/shared/flow-visualizer/activity-log.tsx +++ b/apps/hash-frontend/src/pages/[shortname]/shared/flow-visualizer/activity-log.tsx @@ -6,7 +6,6 @@ import { } from "@hashintel/design-system"; import { Entity } from "@local/hash-graph-sdk/entity"; import type { - EntityId, EntityMetadata, EntityRecordId, } from "@local/hash-graph-types/entity"; @@ -77,16 +76,20 @@ const getEntityLabelFromLog = (log: StepProgressLog): string => { ? entity.entityTypeIds : entity.metadata.entityTypeIds; - const entityLabel = generateEntityLabel(null, { - properties: entity.properties, - metadata: { - recordId: { - editionId: "irrelevant-here", - entityId: `ownedBy~${entityId}` as EntityId, - } satisfies EntityRecordId, - entityTypeIds, - } as EntityMetadata, - }); + const entityLabel = generateEntityLabel( + null, + { + properties: entity.properties, + metadata: { + recordId: { + editionId: "irrelevant-here", + entityId, + } satisfies EntityRecordId, + entityTypeIds, + } as EntityMetadata, + }, + true, + ); return entityLabel; }; diff --git a/apps/hash-frontend/src/pages/[shortname]/shared/flow-visualizer/outputs.tsx b/apps/hash-frontend/src/pages/[shortname]/shared/flow-visualizer/outputs.tsx index cc6e9b5dbb6..ed9631e33bf 100644 --- a/apps/hash-frontend/src/pages/[shortname]/shared/flow-visualizer/outputs.tsx +++ b/apps/hash-frontend/src/pages/[shortname]/shared/flow-visualizer/outputs.tsx @@ -30,7 +30,7 @@ import { import type { SvgIconProps } from "@mui/material"; import { Box, Collapse, Stack, Typography } from "@mui/material"; import type { FunctionComponent, PropsWithChildren, ReactNode } from "react"; -import { useCallback, useMemo, useState } from "react"; +import { useCallback, useEffect, useMemo, useState } from "react"; import type { FlowRun, @@ -351,67 +351,81 @@ export const Outputs = ({ [persistedEntities], ); - const { data: proposedEntitiesTypesData } = useQuery< - QueryEntityTypesQuery, - QueryEntityTypesQueryVariables - >(queryEntityTypesQuery, { - fetchPolicy: "cache-and-network", - variables: { - filter: { - any: [ - ...new Set( - proposedEntities.flatMap( - (proposedEntity) => proposedEntity.entityTypeIds, + const { + data: proposedEntitiesTypesData, + previousData: previousProposedEntitiesTypesData, + } = useQuery( + queryEntityTypesQuery, + { + fetchPolicy: "cache-and-network", + variables: { + filter: { + any: [ + ...new Set( + proposedEntities.flatMap( + (proposedEntity) => proposedEntity.entityTypeIds, + ), ), + ].map((entityTypeId) => + generateVersionedUrlMatchingFilter(entityTypeId, { + forEntityType: true, + }), ), - ].map((entityTypeId) => - generateVersionedUrlMatchingFilter(entityTypeId, { - forEntityType: true, - }), - ), + }, + ...fullOntologyResolveDepths, }, - ...fullOntologyResolveDepths, + skip: proposedEntities.length === 0, }, - skip: proposedEntities.length === 0, - }); + ); const proposedEntitiesTypesSubgraph = useMemo(() => { if (!proposedEntitiesTypesData) { - return undefined; + return previousProposedEntitiesTypesData + ? 
deserializeSubgraph( + previousProposedEntitiesTypesData.queryEntityTypes, + ) + : undefined; } return deserializeSubgraph(proposedEntitiesTypesData.queryEntityTypes); - }, [proposedEntitiesTypesData]); - - const { data: entitiesSubgraphData } = useQuery< - GetEntitySubgraphQuery, - GetEntitySubgraphQueryVariables - >(getEntitySubgraphQuery, { - variables: { - includePermissions: false, - request: { - filter: persistedEntitiesFilter, - graphResolveDepths: { - ...zeroedGraphResolveDepths, - ...fullOntologyResolveDepths, + }, [proposedEntitiesTypesData, previousProposedEntitiesTypesData]); + + const { + data: persistedEntitiesSubgraphData, + previousData: previousPersistedEntitiesSubgraphData, + } = useQuery( + getEntitySubgraphQuery, + { + variables: { + includePermissions: false, + request: { + filter: persistedEntitiesFilter, + graphResolveDepths: { + ...zeroedGraphResolveDepths, + ...fullOntologyResolveDepths, + }, + temporalAxes: currentTimeInstantTemporalAxes, + includeDrafts: true, }, - temporalAxes: currentTimeInstantTemporalAxes, - includeDrafts: true, }, + skip: !persistedEntities.length, + fetchPolicy: "network-only", }, - skip: !persistedEntities.length, - fetchPolicy: "network-only", - }); + ); const persistedEntitiesSubgraph = useMemo(() => { - if (!entitiesSubgraphData) { - return undefined; + if (!persistedEntitiesSubgraphData) { + return previousPersistedEntitiesSubgraphData + ? deserializeSubgraph( + previousPersistedEntitiesSubgraphData.getEntitySubgraph.subgraph, + ) + : undefined; } return deserializeSubgraph( - entitiesSubgraphData.getEntitySubgraph.subgraph, + persistedEntitiesSubgraphData.getEntitySubgraph.subgraph, ); - }, [entitiesSubgraphData]); + }, [persistedEntitiesSubgraphData, previousPersistedEntitiesSubgraphData]); const selectedEntitySubgraph = useMemo(() => { const selectedEntityId = @@ -421,6 +435,23 @@ export const Outputs = ({ return undefined; } + const persistedEntity = persistedEntities.find( + ({ entity }) => + entity && + new Entity(entity).metadata.recordId.entityId === selectedEntityId, + ); + + if (persistedEntity) { + if (!persistedEntitiesSubgraph) { + return undefined; + } + + return generateEntityRootedSubgraph( + selectedEntityId, + persistedEntitiesSubgraph, + ); + } + const proposedEntity = proposedEntities.find( (entity) => entity.localEntityId === selectedEntityId, ); @@ -486,16 +517,8 @@ export const Outputs = ({ return mockSubgraph; } - - if (!persistedEntitiesSubgraph) { - return undefined; - } - - return generateEntityRootedSubgraph( - selectedEntityId, - persistedEntitiesSubgraph, - ); }, [ + persistedEntities, persistedEntitiesSubgraph, proposedEntitiesTypesSubgraph, proposedEntities, @@ -503,6 +526,12 @@ export const Outputs = ({ slideOver, ]); + useEffect(() => { + if (!hasClaims && visibleSection === "claims" && hasEntities) { + setVisibleSection("entities"); + } + }, [hasClaims, hasEntities, visibleSection]); + const entitiesForGraph = useMemo(() => { const entities: EntityForGraphChart[] = []; diff --git a/apps/hash-frontend/src/pages/[shortname]/shared/flow-visualizer/outputs/entity-result-graph.tsx b/apps/hash-frontend/src/pages/[shortname]/shared/flow-visualizer/outputs/entity-result-graph.tsx index 296748c289e..5e1b553a00b 100644 --- a/apps/hash-frontend/src/pages/[shortname]/shared/flow-visualizer/outputs/entity-result-graph.tsx +++ b/apps/hash-frontend/src/pages/[shortname]/shared/flow-visualizer/outputs/entity-result-graph.tsx @@ -1,10 +1,12 @@ import type { VersionedUrl } from 
"@blockprotocol/type-system-rs/pkg/type-system"; import type { EntityForGraphChart } from "@hashintel/block-design-system"; -import { EntitiesGraphChart } from "@hashintel/block-design-system"; +import { LoadingSpinner } from "@hashintel/design-system"; import type { EntityId } from "@local/hash-graph-types/entity"; import type { Subgraph } from "@local/hash-subgraph"; +import { useTheme } from "@mui/material"; import { useMemo } from "react"; +import { EntityGraphVisualizer } from "../../../../shared/entity-graph-visualizer"; import { EmptyOutputBox } from "./shared/empty-output-box"; import { outputIcons } from "./shared/icons"; import { OutputContainer } from "./shared/output-container"; @@ -56,6 +58,8 @@ export const EntityResultGraph = ({ return Object.values(deduplicatedLatestEntitiesByEntityId); }, [entities]); + const theme = useTheme(); + if (!subgraphWithTypes && !entities.length) { return ( @@ -68,14 +72,16 @@ export const EntityResultGraph = ({ } return ( - + {subgraphWithTypes && ( - + } onEntityClick={onEntityClick} onEntityTypeClick={onEntityTypeClick} subgraphWithTypes={subgraphWithTypes} - sx={{ maxHeight: "100%" }} /> )} diff --git a/apps/hash-frontend/src/pages/[shortname]/shared/flow-visualizer/outputs/entity-result-table.tsx b/apps/hash-frontend/src/pages/[shortname]/shared/flow-visualizer/outputs/entity-result-table.tsx index 25069b203fc..b5f2f855b9a 100644 --- a/apps/hash-frontend/src/pages/[shortname]/shared/flow-visualizer/outputs/entity-result-table.tsx +++ b/apps/hash-frontend/src/pages/[shortname]/shared/flow-visualizer/outputs/entity-result-table.tsx @@ -1013,9 +1013,6 @@ export const EntityResultTable = memo( ), ) ) { - /** - * There are no values in common between the filter and the link targets for this type, for this row - */ return false; } } else { diff --git a/apps/hash-frontend/src/pages/shared/entity-graph-visualizer.tsx b/apps/hash-frontend/src/pages/shared/entity-graph-visualizer.tsx index 19f5f14bcf0..4d59633470b 100644 --- a/apps/hash-frontend/src/pages/shared/entity-graph-visualizer.tsx +++ b/apps/hash-frontend/src/pages/shared/entity-graph-visualizer.tsx @@ -1,4 +1,6 @@ +import { mustHaveAtLeastOne } from "@blockprotocol/type-system"; import type { VersionedUrl } from "@blockprotocol/type-system-rs/pkg/type-system"; +import { ibm } from "@hashintel/design-system/palettes"; import type { EntityId, EntityMetadata, @@ -33,7 +35,7 @@ export type EntityForGraph = { }; const fallbackDefaultConfig = { - graphKey: "entity-graph", + graphKey: "entity-graph-2024-11-19b", edgeSizing: { min: 2, max: 5, @@ -87,28 +89,7 @@ export const EntityGraphVisualizer = memo( const [loading, setLoading] = useState(true); const nodeColors = useMemo(() => { - return [ - { - color: palette.blue[30], - borderColor: palette.gray[50], - }, - { - color: palette.purple[30], - borderColor: palette.gray[50], - }, - { - color: palette.green[50], - borderColor: palette.gray[50], - }, - { - color: palette.red[20], - borderColor: palette.gray[50], - }, - { - color: palette.yellow[30], - borderColor: palette.gray[50], - }, - ] as const; + return ibm.map((color) => ({ color, borderColor: palette.gray[50] })); }, [palette]); const defaultConfig = defaultConfigFromProps ?? fallbackDefaultConfig; @@ -149,42 +130,43 @@ export const EntityGraphVisualizer = memo( const specialHighlight = isPrimaryEntity?.(entity) ?? 
false; + const sortedEntityTypeIds = mustHaveAtLeastOne( + entity.metadata.entityTypeIds.toSorted(), + ); + + const firstEntityTypeId = sortedEntityTypeIds[0]; + /** * @todo H-3539: take account of additional types an entity might have */ - if (!entityTypeIdToColor.has(entity.metadata.entityTypeIds[0])) { + if (!entityTypeIdToColor.has(firstEntityTypeId)) { entityTypeIdToColor.set( - entity.metadata.entityTypeIds[0], + firstEntityTypeId, entityTypeIdToColor.size % nodeColors.length, ); } const { color, borderColor } = specialHighlight ? { color: palette.blue[50], borderColor: palette.blue[60] } - : nodeColors[ - entityTypeIdToColor.get(entity.metadata.entityTypeIds[0])! - ]!; + : nodeColors[entityTypeIdToColor.get(firstEntityTypeId)!]!; const entityType = - entityTypesById[entity.metadata.entityTypeIds[0]] ?? - getEntityTypeById( - subgraphWithTypes, - entity.metadata.entityTypeIds[0], - ); + entityTypesById[firstEntityTypeId] ?? + getEntityTypeById(subgraphWithTypes, firstEntityTypeId); if (!entityType) { throw new Error( - `Could not find entity type for ${entity.metadata.entityTypeIds[0]}`, + `Could not find entity type for ${firstEntityTypeId}`, ); } - entityTypesById[entity.metadata.entityTypeIds[0]] ??= entityType; + entityTypesById[firstEntityTypeId] ??= entityType; nodesToAddByNodeId[entity.metadata.recordId.entityId] = { icon: entityType.schema.icon ?? undefined, label: generateEntityLabel(subgraphWithTypes, entity), nodeId: entity.metadata.recordId.entityId, - nodeTypeId: entity.metadata.entityTypeIds[0], + nodeTypeId: firstEntityTypeId, nodeTypeLabel: entityType.schema.title, color, borderColor, diff --git a/apps/hash-frontend/src/pages/shared/flow-definitions-context.tsx b/apps/hash-frontend/src/pages/shared/flow-definitions-context.tsx index e680ef40dd3..4e610663c80 100644 --- a/apps/hash-frontend/src/pages/shared/flow-definitions-context.tsx +++ b/apps/hash-frontend/src/pages/shared/flow-definitions-context.tsx @@ -11,6 +11,7 @@ import { researchTaskFlowDefinition, saveFileFromUrl, } from "@local/hash-isomorphic-utils/flows/example-flow-definitions"; +import { inferMetadataFromDocumentFlowDefinition } from "@local/hash-isomorphic-utils/flows/file-flow-definitions"; import { goalFlowDefinition, goalFlowDefinitionWithReportAndSpreadsheetDeliverable, @@ -33,6 +34,7 @@ const exampleFlows: FlowDefinition[] = [ researchTaskFlowDefinition, researchEntitiesFlowDefinition, ftseInvestorsFlowDefinition, + inferMetadataFromDocumentFlowDefinition, inferUserEntitiesFromWebPageFlowDefinition, answerQuestionFlow, saveFileFromUrl, diff --git a/apps/hash-frontend/src/pages/shared/graph-visualizer/graph-container/use-layout.tsx b/apps/hash-frontend/src/pages/shared/graph-visualizer/graph-container/use-layout.tsx index 071abce12a4..cf96d0a9b98 100644 --- a/apps/hash-frontend/src/pages/shared/graph-visualizer/graph-container/use-layout.tsx +++ b/apps/hash-frontend/src/pages/shared/graph-visualizer/graph-container/use-layout.tsx @@ -22,7 +22,7 @@ export const useLayout = () => { settings: { ...settings, outboundAttractionDistribution: true, - gravity: 2.5, + gravity: 1, scalingRatio: 10, }, }); diff --git a/apps/hash-frontend/src/pages/shared/notifications-with-links-context.tsx b/apps/hash-frontend/src/pages/shared/notifications-with-links-context.tsx index 2100e5ff2e6..3947902d5c9 100644 --- a/apps/hash-frontend/src/pages/shared/notifications-with-links-context.tsx +++ b/apps/hash-frontend/src/pages/shared/notifications-with-links-context.tsx @@ -393,23 +393,40 @@ export const 
useNotificationsWithLinksContextValue = continue; } + const editions = typedValues(editionMap).flat(); + /** * We have a candidate – this might be one of multiple draft series for the entity, or the single live series. * We match the timestamp logged in the link to the editions of the entity. * This may result in a false positive if the live entity and any of its drafts have editions at the exact same timestamp. */ - occurredInEntity = typedValues(editionMap) - .flat() - .find( - (vertex): vertex is EntityVertex => - vertex.kind === "entity" && - vertex.inner.metadata.temporalVersioning.decisionTime.start - .limit === occurredInEntityEditionTimestamp, - )?.inner; + occurredInEntity = editions.find( + (vertex): vertex is EntityVertex => + vertex.kind === "entity" && + vertex.inner.metadata.temporalVersioning.decisionTime.start + .limit === occurredInEntityEditionTimestamp, + )?.inner; if (occurredInEntity) { break; } + + /** + * If the entity has been updated since the notification was created, we won't have the edition in the subgraph, + * because the request above only fetches editions still valid for the current timestamp. + * In order to show the notification we just take any available edition. + * + * The other option would be to fetch the entire history for all entities which are the subject of a notification, + * but this might be a lot of data. + */ + const anyAvailableEdition = editions.find( + (vertex): vertex is EntityVertex => vertex.kind === "entity", + )?.inner; + + if (anyAvailableEdition) { + occurredInEntity = anyAvailableEdition; + break; + } } if (!occurredInEntity) { diff --git a/apps/hash-frontend/src/shared/use-entity-type-entities.tsx b/apps/hash-frontend/src/shared/use-entity-type-entities.tsx index f4bc4bef13d..0c80371f663 100644 --- a/apps/hash-frontend/src/shared/use-entity-type-entities.tsx +++ b/apps/hash-frontend/src/shared/use-entity-type-entities.tsx @@ -15,7 +15,6 @@ import { useMemo } from "react"; import type { GetEntitySubgraphQuery, GetEntitySubgraphQueryVariables, - QueryEntitiesQueryVariables, } from "../graphql/api-types.gen"; import { getEntitySubgraphQuery, @@ -24,47 +23,60 @@ import { import { apolloClient } from "../lib/apollo-client"; import type { EntityTypeEntitiesContextValue } from "./entity-type-entities-context"; -export const generateUseEntityTypeEntitiesQueryVariables = (params: { +type UseEntityTypeEntitiesQueryParams = { ownedById?: OwnedById; entityTypeBaseUrl?: BaseUrl; entityTypeId?: VersionedUrl; graphResolveDepths?: Partial; -}): QueryEntitiesQueryVariables => ({ - operation: { - multiFilter: { - filters: [ - ...(params.ownedById +}; + +export const generateUseEntityTypeEntitiesQueryVariables = ({ + ownedById, + entityTypeBaseUrl, + entityTypeId, + graphResolveDepths, +}: UseEntityTypeEntitiesQueryParams): GetEntitySubgraphQueryVariables => ({ + request: { + filter: { + all: [ + ...(ownedById ? [ { - field: ["ownedById"], - operator: "EQUALS" as const, - value: params.ownedById, + equal: [{ path: ["ownedById"] }, { parameter: ownedById }], }, ] : []), - ...(params.entityTypeBaseUrl + ...(entityTypeBaseUrl ? [ { - field: ["metadata", "entityTypeBaseUrl"], - operator: "EQUALS" as const, - value: params.entityTypeBaseUrl, + equal: [ + { path: ["type", "baseUrl"] }, + { parameter: entityTypeBaseUrl }, + ], }, ] - : params.entityTypeId + : entityTypeId ? 
[ { - field: ["metadata", "entityTypeId"], - operator: "EQUALS" as const, - value: params.entityTypeId, + equal: [ + { path: ["type", "versionedUrl"] }, + { parameter: entityTypeId }, + ], }, ] : []), + ...(!entityTypeId && !entityTypeBaseUrl + ? [ignoreNoisySystemTypesFilter] + : []), ], - operator: "AND", }, + graphResolveDepths: { + ...zeroedGraphResolveDepths, + ...graphResolveDepths, + }, + includeDrafts: false, + temporalAxes: currentTimeInstantTemporalAxes, }, - ...zeroedGraphResolveDepths, - ...params.graphResolveDepths, includePermissions: false, }); @@ -78,50 +90,13 @@ export const useEntityTypeEntities = (params: { params; const variables = useMemo( - () => ({ - request: { - filter: { - all: [ - ...(ownedById - ? [ - { - equal: [{ path: ["ownedById"] }, { parameter: ownedById }], - }, - ] - : []), - ...(entityTypeBaseUrl - ? [ - { - equal: [ - { path: ["type", "baseUrl"] }, - { parameter: entityTypeBaseUrl }, - ], - }, - ] - : entityTypeId - ? [ - { - equal: [ - { path: ["type", "versionedUrl"] }, - { parameter: entityTypeId }, - ], - }, - ] - : []), - ...(!entityTypeId && !entityTypeBaseUrl - ? [ignoreNoisySystemTypesFilter] - : []), - ], - }, - graphResolveDepths: { - ...zeroedGraphResolveDepths, - ...graphResolveDepths, - }, - includeDrafts: false, - temporalAxes: currentTimeInstantTemporalAxes, - }, - includePermissions: false, - }), + () => + generateUseEntityTypeEntitiesQueryVariables({ + entityTypeBaseUrl, + entityTypeId, + ownedById, + graphResolveDepths, + }), [entityTypeBaseUrl, graphResolveDepths, entityTypeId, ownedById], ); diff --git a/infra/docker/frontend/prod/Dockerfile b/infra/docker/frontend/prod/Dockerfile index eece9d4adc4..7193b5b65be 100644 --- a/infra/docker/frontend/prod/Dockerfile +++ b/infra/docker/frontend/prod/Dockerfile @@ -52,11 +52,11 @@ RUN yarn install --frozen-lockfile --prefer-offline \ COPY --from=base /app/out/full/ . 
-ENV NODE_ENV production +ENV NODE_ENV=production ARG API_ORIGIN -ENV API_ORIGIN ${API_ORIGIN} +ENV API_ORIGIN=${API_ORIGIN} ARG FRONTEND_URL -ENV FRONTEND_URL ${FRONTEND_URL} +ENV FRONTEND_URL=${FRONTEND_URL} RUN yarn turbo build --filter '@apps/hash-frontend' && rm -rf target/ @@ -76,4 +76,4 @@ RUN groupadd --system --gid 60000 hash && \ RUN chown -R frontend:hash .next USER frontend:hash -ENV NODE_ENV production +ENV NODE_ENV=production diff --git a/infra/terraform/hash/main.tf b/infra/terraform/hash/main.tf index 4a8877022b4..4ceb8fec619 100644 --- a/infra/terraform/hash/main.tf +++ b/infra/terraform/hash/main.tf @@ -403,6 +403,25 @@ module "application" { name = "ANTHROPIC_API_KEY", secret = true, value = sensitive(data.vault_kv_secret_v2.secrets.data["hash_anthropic_api_key"]) }, + { + # The name of the HASH App project in Google Cloud + # Note that this is different to the project the service account and identity federation providers are in + # Must have Vertex AI enabled + name = "GOOGLE_CLOUD_HASH_PROJECT_ID", secret = true, + value = sensitive(data.vault_kv_secret_v2.secrets.data["google_cloud_hash_project_id"]) + }, + { + # The name of the Google Cloud Storage bucket to use for uploads + # The authenticated GCP user must have object write and read permissions on the bucket + name = "GOOGLE_CLOUD_STORAGE_BUCKET", secret = true, + value = sensitive(data.vault_kv_secret_v2.secrets.data["google_cloud_storage_bucket"]) + }, + { + # The JSON with the configuration for the AWS Identity Federation Provider, downloaded from GCP + # This should detail a connected service account with VertexAI permissions on the HASH project, and bucket read/write access + name = "GOOGLE_CLOUD_WORKLOAD_IDENTITY_FEDERATION_CONFIG_JSON", secret = true, + value = sensitive(data.vault_kv_secret_v2.secrets.data["google_cloud_workload_identity_federation_config_json"]) + }, { name = "INTERNAL_API_HOST", secret = true, value = sensitive(data.vault_kv_secret_v2.secrets.data["internal_api_host"]) diff --git a/libs/@blockprotocol/graph/src/codegen/initialize/traverse-and-collate-schemas.ts b/libs/@blockprotocol/graph/src/codegen/initialize/traverse-and-collate-schemas.ts index 3a990df614d..6219c616f56 100644 --- a/libs/@blockprotocol/graph/src/codegen/initialize/traverse-and-collate-schemas.ts +++ b/libs/@blockprotocol/graph/src/codegen/initialize/traverse-and-collate-schemas.ts @@ -1,4 +1,7 @@ -import { atLeastOne } from "@blockprotocol/type-system"; +import { + atLeastOne, + getReferencedIdsFromDataType, +} from "@blockprotocol/type-system"; import type { VersionedUrl } from "@blockprotocol/type-system/slim"; import { getReferencedIdsFromEntityType, @@ -144,6 +147,12 @@ export const traverseAndCollateSchemas = async ( addFetchPromise( fetchTypeAsJson(rewrittenTypeId, initialContext).then((type) => { if (isDataType(type)) { + const { inheritsFromDataTypes } = getReferencedIdsFromDataType(type); + + for (const dependencyTypeId of inheritsFromDataTypes) { + traversalContext.encounter(typeId, dependencyTypeId); + } + initialContext.addDataType(type); const withMetadata = generateDataTypeWithMetadataSchema(type); diff --git a/libs/@blockprotocol/type-system/typescript/src/native/data-type.ts b/libs/@blockprotocol/type-system/typescript/src/native/data-type.ts index 0d422b542eb..9335a86cc76 100644 --- a/libs/@blockprotocol/type-system/typescript/src/native/data-type.ts +++ b/libs/@blockprotocol/type-system/typescript/src/native/data-type.ts @@ -1,4 +1,25 @@ -import type { DataType } from "@blockprotocol/type-system-rs"; 
+import type { DataType, VersionedUrl } from "@blockprotocol/type-system-rs"; export const DATA_TYPE_META_SCHEMA: DataType["$schema"] = "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type"; + +/** + * Returns all the IDs of all types referenced in a given data type. + * + * @param {DataType} dataType + */ +export const getReferencedIdsFromDataType = ( + dataType: DataType, +): { + inheritsFromDataTypes: VersionedUrl[]; +} => { + const inheritsFromDataTypes: VersionedUrl[] = []; + + for (const inheritedEntityType of dataType.allOf ?? []) { + inheritsFromDataTypes.push(inheritedEntityType.$ref); + } + + return { + inheritsFromDataTypes: [...inheritsFromDataTypes], + }; +}; diff --git a/libs/@blockprotocol/type-system/typescript/src/native/entity-type.ts b/libs/@blockprotocol/type-system/typescript/src/native/entity-type.ts index 7a11735bf58..6aedc6d8fe6 100644 --- a/libs/@blockprotocol/type-system/typescript/src/native/entity-type.ts +++ b/libs/@blockprotocol/type-system/typescript/src/native/entity-type.ts @@ -4,7 +4,7 @@ export const ENTITY_TYPE_META_SCHEMA: EntityType["$schema"] = "https://blockprotocol.org/types/modules/graph/0.3/schema/entity-type"; /** - * Returns all the IDs of all types referenced in a given property type. + * Returns all the IDs of all types referenced in a given entity type. * * @param {EntityType} entityType */ diff --git a/libs/@local/eslint-config/legacy-base-eslintrc-to-refactor.cjs b/libs/@local/eslint-config/legacy-base-eslintrc-to-refactor.cjs index b7fc7aa4073..6c692a3b853 100644 --- a/libs/@local/eslint-config/legacy-base-eslintrc-to-refactor.cjs +++ b/libs/@local/eslint-config/legacy-base-eslintrc-to-refactor.cjs @@ -189,6 +189,7 @@ module.exports = { "@hashintel/design-system/*", "!@hashintel/design-system/theme", "!@hashintel/design-system/constants", + "!@hashintel/design-system/palettes", ], message: "Please import from @hashintel/design-system instead.", }, diff --git a/libs/@local/graph/sdk/typescript/src/entity.ts b/libs/@local/graph/sdk/typescript/src/entity.ts index 44aa4803094..0d1dd24c409 100644 --- a/libs/@local/graph/sdk/typescript/src/entity.ts +++ b/libs/@local/graph/sdk/typescript/src/entity.ts @@ -86,6 +86,7 @@ export type PatchEntityParameters = Omit< propertyPatches?: PropertyPatchOperation[]; provenance: EnforcedEntityEditionProvenance; }; + const typeId: unique symbol = Symbol.for( "@local/hash-graph-sdk/entity/SerializedEntity", ); diff --git a/libs/@local/hash-isomorphic-utils/package.json b/libs/@local/hash-isomorphic-utils/package.json index fe003fc4abd..6b5457924a3 100644 --- a/libs/@local/hash-isomorphic-utils/package.json +++ b/libs/@local/hash-isomorphic-utils/package.json @@ -19,7 +19,7 @@ "build": "rimraf dist && tsc --build tsconfig.build.json", "codegen": "rimraf './src/**/*.gen.*'; graphql-codegen --config codegen.config.ts", "fix:eslint": "eslint --fix .", - "generate-system-types": "tsx ./src/generate-system-types.ts; fix-esm-import-path ./src/system-types/{*.ts,**/*.ts}; yarn fix:eslint; yarn prettier --write ./src/system-types/{*.ts,**/*.ts}", + "generate-system-types": "tsx ./src/generate-system-types.ts; echo 'Fixing import paths'; fix-esm-import-path ./src/system-types/; yarn fix:eslint; yarn prettier --write ./src/system-types/{*.ts,**/*.ts}", "lint:eslint": "eslint --report-unused-disable-directives .", "lint:tsc": "tsc --noEmit", "test:unit": "vitest --run" diff --git a/libs/@local/hash-isomorphic-utils/src/flows/action-definitions.ts 
b/libs/@local/hash-isomorphic-utils/src/flows/action-definitions.ts index b8b30d35ed6..d4a9842d4fd 100644 --- a/libs/@local/hash-isomorphic-utils/src/flows/action-definitions.ts +++ b/libs/@local/hash-isomorphic-utils/src/flows/action-definitions.ts @@ -12,8 +12,9 @@ const actionDefinitionIds = [ "getFileFromUrl", "getWebPageByUrl", "getWebPageSummary", - "processAutomaticBrowsingSettings", + "inferMetadataFromDocument", "inferEntitiesFromContent", + "processAutomaticBrowsingSettings", "persistEntities", "persistEntity", "researchEntities", @@ -193,6 +194,37 @@ const actionDefinitionsAsConst = { }, ], }, + inferMetadataFromDocument: { + actionDefinitionId: "inferMetadataFromDocument", + name: "Infer Metadata From Document", + description: + "Infer metadata from a document file (document kind, title, number of pages, etc.), add the relevant type to the associated entity, and propose new entities representing its author, publisher, etc.", + kind: "action", + inputs: [ + { + oneOfPayloadKinds: ["EntityId"], + name: "documentEntityId", + required: true, + array: false, + }, + ], + outputs: [ + { + payloadKind: "ProposedEntity", + description: "The entities inferred from the document, e.g. authors.", + name: "proposedEntities", + array: true, + required: true, + }, + { + payloadKind: "PersistedEntity", + description: "The updated document entity.", + name: "updatedDocumentEntity", + array: false, + required: true, + }, + ], + }, persistEntity: { actionDefinitionId: "persistEntity", name: "Persist Entity", diff --git a/libs/@local/hash-isomorphic-utils/src/flows/file-flow-definitions.ts b/libs/@local/hash-isomorphic-utils/src/flows/file-flow-definitions.ts new file mode 100644 index 00000000000..ba01605bd41 --- /dev/null +++ b/libs/@local/hash-isomorphic-utils/src/flows/file-flow-definitions.ts @@ -0,0 +1,156 @@ +import type { EntityUuid } from "@local/hash-graph-types/entity"; + +import type { + InputNameForAction, + OutputNameForAction, +} from "./action-definitions.js"; +import type { FlowDefinition } from "./types.js"; + +/** + * @example + * mutation { + * startFlow( + * dataSources: { + * files: { fileEntityIds: [] } + * internetAccess: { + * enabled: false + * browserPlugin: { enabled: false, domains: [] } + * } + * } + * webId:"558b0742-3990-4bcc-a486-db231e278894" + * flowTrigger: { + * triggerDefinitionId: "onFileUpload" + * outputs: [ + * { + * outputName: "fileEntityId" + * payload: { + * kind: "EntityId" + * value: "558b0742-3990-4bcc-a486-db231e278894~612a3806-930c-49f6-882f-5ee88a56eaf3" + * } + * } + * ] + * } + * flowDefinition: { + * name: "Infer metadata from document" + * flowDefinitionId: "infer-metadata-from-document" + * description: "Infer metadata from a document, assign appropriate type to document, and create associated entities."
+ * trigger: { + * kind: "trigger" + * description: "Triggered when user uploads a file" + * triggerDefinitionId: "onFileUpload" + * outputs: [ + * { + * payloadKind: "EntityId" + * name: "fileEntityId" + * array: false + * required: true + * } + * ] + * } + * steps: [ + * { + * stepId: "1" + * kind: "action" + * actionDefinitionId: "inferMetadataFromDocument" + * description: "Infer metadata from document, assign appropriate type, propose associated entities" + * inputSources: [ + * { + * inputName: "documentEntityId" + * kind: "step-output" + * sourceStepId: "trigger" + * sourceStepOutputName: "fileEntityId" + * } + * ] + * } + * { + * stepId: "2" + * kind: "action" + * actionDefinitionId: "persistEntities" + * description: "Save proposed entities to database" + * inputSources: [ + * { + * inputName: "proposedEntities" + * kind: "step-output" + * sourceStepId: "1" + * sourceStepOutputName: "proposedEntities" + * } + * ] + * } + * ] + * outputs: [ + * { + * stepId: "2" + * stepOutputName: "persistedEntities" + * name: "persistedEntities" + * payloadKind: "PersistedEntity" + * array: true + * required: true + * } + * ] + * } + * ) + * } + */ +export const inferMetadataFromDocumentFlowDefinition: FlowDefinition = { + name: "Infer metadata from document", + flowDefinitionId: "infer-metadata-from-document" as EntityUuid, + description: + "Infer metadata from a document, assign appropriate type to document, and create associated entities.", + trigger: { + kind: "trigger", + description: "Triggered when user uploads a file", + triggerDefinitionId: "onFileUpload", + outputs: [ + { + payloadKind: "EntityId", + name: "fileEntityId", + array: false, + required: true, + }, + ], + }, + steps: [ + { + stepId: "1", + kind: "action", + actionDefinitionId: "inferMetadataFromDocument", + description: + "Infer metadata from document, assign appropriate type, propose associated entities", + inputSources: [ + { + inputName: + "documentEntityId" satisfies InputNameForAction<"inferMetadataFromDocument">, + kind: "step-output", + sourceStepId: "trigger", + sourceStepOutputName: "fileEntityId", + }, + ], + }, + { + stepId: "2", + kind: "action", + actionDefinitionId: "persistEntities", + description: "Save proposed entities to database", + inputSources: [ + { + inputName: + "proposedEntities" satisfies InputNameForAction<"persistEntities">, + kind: "step-output", + sourceStepId: "1", + sourceStepOutputName: + "proposedEntities" satisfies OutputNameForAction<"inferMetadataFromDocument">, + }, + ], + }, + ], + outputs: [ + { + stepId: "2", + stepOutputName: "persistedEntities", + name: "persistedEntities" as const, + payloadKind: "PersistedEntity", + array: true, + required: true, + }, + ], +}; diff --git a/libs/@local/hash-isomorphic-utils/src/flows/trigger-definitions.ts b/libs/@local/hash-isomorphic-utils/src/flows/trigger-definitions.ts index abe853cd708..bdd937a98fd 100644 --- a/libs/@local/hash-isomorphic-utils/src/flows/trigger-definitions.ts +++ b/libs/@local/hash-isomorphic-utils/src/flows/trigger-definitions.ts @@ -1,6 +1,7 @@ import type { DeepReadOnly, TriggerDefinition } from "./types.js"; const triggerIds = [ + "onFileUpload", "userTrigger", "userVisitedWebPageTrigger", "scheduledTrigger", @@ -9,6 +10,19 @@ const triggerIds = [ export type TriggerDefinitionId = (typeof triggerIds)[number]; const triggerDefinitionsAsConst = { + onFileUpload: { + kind: "trigger", + triggerDefinitionId: "onFileUpload", + name: "On File Upload", + outputs: [ + { + payloadKind: "EntityId", + name: "fileEntityId" as
const, + array: false, + required: true, + }, + ], + }, userTrigger: { kind: "trigger", triggerDefinitionId: "userTrigger", diff --git a/libs/@local/hash-isomorphic-utils/src/flows/types.ts b/libs/@local/hash-isomorphic-utils/src/flows/types.ts index 787ba590322..b05a642ec70 100644 --- a/libs/@local/hash-isomorphic-utils/src/flows/types.ts +++ b/libs/@local/hash-isomorphic-utils/src/flows/types.ts @@ -387,7 +387,11 @@ export type ProgressLogBase = { stepId: string; }; -export type WorkerType = "Coordinator" | "Sub-coordinator" | "Link explorer"; +export type WorkerType = + | "Coordinator" + | "Sub-coordinator" + | "Link explorer" + | "Document analyzer"; /** * Identifiers for a 'worker' within the flow, which corresponds to an agent. diff --git a/libs/@local/hash-isomorphic-utils/src/generate-system-types.ts b/libs/@local/hash-isomorphic-utils/src/generate-system-types.ts index 9d709132ab3..d2e4f8d134b 100644 --- a/libs/@local/hash-isomorphic-utils/src/generate-system-types.ts +++ b/libs/@local/hash-isomorphic-utils/src/generate-system-types.ts @@ -43,21 +43,26 @@ const generateTypes = async ( targets[`${slugify(name)}.ts`] = [{ sourceTypeId: entityTypeId }]; } - await codegen({ - outputFolder: `src/system-types${subFolder ? `/${subFolder}` : ""}`, - targets, - getFetchUrlFromTypeId: (typeId) => { - if (typeId.startsWith("https://hash.ai/")) { - const rewrittenTypeId = typeId.replace( - "https://hash.ai/", - "http://localhost:3000/", - ) as VersionedUrl; + const logLevel = process.env.LOG_LEVEL ?? "info"; - return rewrittenTypeId; - } - return typeId; + await codegen( + { + outputFolder: `src/system-types${subFolder ? `/${subFolder}` : ""}`, + targets, + getFetchUrlFromTypeId: (typeId) => { + if (typeId.startsWith("https://hash.ai/")) { + const rewrittenTypeId = typeId.replace( + "https://hash.ai/", + "http://localhost:3000/", + ) as VersionedUrl; + + return rewrittenTypeId; + } + return typeId; + }, }, - }); + logLevel as Parameters[1], + ); // eslint-disable-next-line no-console console.log(`Done generating ${label} types.`); diff --git a/libs/@local/hash-isomorphic-utils/src/ontology-type-ids.ts b/libs/@local/hash-isomorphic-utils/src/ontology-type-ids.ts index b5f58885054..c1a90db3282 100644 --- a/libs/@local/hash-isomorphic-utils/src/ontology-type-ids.ts +++ b/libs/@local/hash-isomorphic-utils/src/ontology-type-ids.ts @@ -2,6 +2,11 @@ import type { VersionedUrl } from "@blockprotocol/type-system/slim"; import type { BaseUrl } from "@local/hash-graph-types/ontology"; export const systemEntityTypes = { + academicPaper: { + entityTypeId: "https://hash.ai/@hash/types/entity-type/academic-paper/v/1", + entityTypeBaseUrl: + "https://hash.ai/@hash/types/entity-type/academic-paper/" as BaseUrl, + }, actor: { entityTypeId: "https://hash.ai/@hash/types/entity-type/actor/v/2", entityTypeBaseUrl: @@ -18,6 +23,11 @@ export const systemEntityTypes = { entityTypeBaseUrl: "https://hash.ai/@hash/types/entity-type/block-collection/" as BaseUrl, }, + book: { + entityTypeId: "https://hash.ai/@hash/types/entity-type/book/v/1", + entityTypeBaseUrl: + "https://hash.ai/@hash/types/entity-type/book/" as BaseUrl, + }, browserPluginSettings: { entityTypeId: "https://hash.ai/@hash/types/entity-type/browser-plugin-settings/v/1", @@ -45,6 +55,11 @@ export const systemEntityTypes = { entityTypeBaseUrl: "https://hash.ai/@hash/types/entity-type/comment-notification/" as BaseUrl, }, + doc: { + entityTypeId: "https://hash.ai/@hash/types/entity-type/doc/v/1", + entityTypeBaseUrl: + 
"https://hash.ai/@hash/types/entity-type/doc/" as BaseUrl, + }, document: { entityTypeId: "https://hash.ai/@hash/types/entity-type/document/v/1", entityTypeBaseUrl: @@ -108,6 +123,11 @@ export const systemEntityTypes = { entityTypeBaseUrl: "https://hash.ai/@hash/types/entity-type/instagram-account/" as BaseUrl, }, + institution: { + entityTypeId: "https://hash.ai/@hash/types/entity-type/institution/v/1", + entityTypeBaseUrl: + "https://hash.ai/@hash/types/entity-type/institution/" as BaseUrl, + }, linearIntegration: { entityTypeId: "https://hash.ai/@hash/types/entity-type/linear-integration/v/7", @@ -151,6 +171,11 @@ export const systemEntityTypes = { entityTypeBaseUrl: "https://hash.ai/@hash/types/entity-type/pdf-document/" as BaseUrl, }, + person: { + entityTypeId: "https://hash.ai/@hash/types/entity-type/person/v/1", + entityTypeBaseUrl: + "https://hash.ai/@hash/types/entity-type/person/" as BaseUrl, + }, pptxPresentation: { entityTypeId: "https://hash.ai/@hash/types/entity-type/pptx-presentation/v/1", @@ -231,6 +256,12 @@ export const systemEntityTypes = { >; export const systemLinkEntityTypes = { + affiliatedWith: { + linkEntityTypeId: + "https://hash.ai/@hash/types/entity-type/affiliated-with/v/1", + linkEntityTypeBaseUrl: + "https://hash.ai/@hash/types/entity-type/affiliated-with/" as BaseUrl, + }, associatedWithAccount: { linkEntityTypeId: "https://hash.ai/@hash/types/entity-type/associated-with-account/v/1", @@ -463,6 +494,16 @@ export const systemPropertyTypes = { propertyTypeBaseUrl: "https://hash.ai/@hash/types/property-type/deleted-at/" as BaseUrl, }, + doi: { + propertyTypeId: "https://hash.ai/@hash/types/property-type/doi/v/1", + propertyTypeBaseUrl: + "https://hash.ai/@hash/types/property-type/doi/" as BaseUrl, + }, + doiLink: { + propertyTypeId: "https://hash.ai/@hash/types/property-type/doi-link/v/1", + propertyTypeBaseUrl: + "https://hash.ai/@hash/types/property-type/doi-link/" as BaseUrl, + }, draftNote: { propertyTypeId: "https://hash.ai/@hash/types/property-type/draft-note/v/1", propertyTypeBaseUrl: @@ -485,6 +526,12 @@ export const systemPropertyTypes = { propertyTypeBaseUrl: "https://hash.ai/@hash/types/property-type/entity-edition-id/" as BaseUrl, }, + experimentalSubject: { + propertyTypeId: + "https://hash.ai/@hash/types/property-type/experimental-subject/v/1", + propertyTypeBaseUrl: + "https://hash.ai/@hash/types/property-type/experimental-subject/" as BaseUrl, + }, expiredAt: { propertyTypeId: "https://hash.ai/@hash/types/property-type/expired-at/v/1", propertyTypeBaseUrl: @@ -537,6 +584,11 @@ export const systemPropertyTypes = { propertyTypeBaseUrl: "https://hash.ai/@hash/types/property-type/file-storage-region/" as BaseUrl, }, + finding: { + propertyTypeId: "https://hash.ai/@hash/types/property-type/finding/v/1", + propertyTypeBaseUrl: + "https://hash.ai/@hash/types/property-type/finding/" as BaseUrl, + }, flowDefinitionId: { propertyTypeId: "https://hash.ai/@hash/types/property-type/flow-definition-id/v/1", @@ -584,6 +636,11 @@ export const systemPropertyTypes = { propertyTypeBaseUrl: "https://hash.ai/@hash/types/property-type/intended-use/" as BaseUrl, }, + isbn: { + propertyTypeId: "https://hash.ai/@hash/types/property-type/isbn/v/1", + propertyTypeBaseUrl: + "https://hash.ai/@hash/types/property-type/isbn/" as BaseUrl, + }, kratosIdentityId: { propertyTypeId: "https://hash.ai/@hash/types/property-type/kratos-identity-id/v/1", @@ -619,6 +676,17 @@ export const systemPropertyTypes = { propertyTypeBaseUrl: 
"https://hash.ai/@hash/types/property-type/manual-inference-configuration/" as BaseUrl, }, + methodology: { + propertyTypeId: "https://hash.ai/@hash/types/property-type/methodology/v/1", + propertyTypeBaseUrl: + "https://hash.ai/@hash/types/property-type/methodology/" as BaseUrl, + }, + numberOfPages: { + propertyTypeId: + "https://hash.ai/@hash/types/property-type/number-of-pages/v/1", + propertyTypeBaseUrl: + "https://hash.ai/@hash/types/property-type/number-of-pages/" as BaseUrl, + }, object: { propertyTypeId: "https://hash.ai/@hash/types/property-type/object/v/1", propertyTypeBaseUrl: @@ -688,6 +756,12 @@ export const systemPropertyTypes = { propertyTypeBaseUrl: "https://hash.ai/@hash/types/property-type/profile-url/" as BaseUrl, }, + publicationYear: { + propertyTypeId: + "https://hash.ai/@hash/types/property-type/publication-year/v/1", + propertyTypeBaseUrl: + "https://hash.ai/@hash/types/property-type/publication-year/" as BaseUrl, + }, readAt: { propertyTypeId: "https://hash.ai/@hash/types/property-type/read-at/v/1", propertyTypeBaseUrl: @@ -832,6 +906,18 @@ export const systemDataTypes = { title: "Actor Type", description: "The type of thing that can, should or will act on something.", }, + bits: { + dataTypeId: "https://hash.ai/@hash/types/data-type/bits/v/1", + dataTypeBaseUrl: "https://hash.ai/@hash/types/data-type/bits/" as BaseUrl, + title: "Bits", + description: "A unit of information equal to one binary digit.", + }, + bytes: { + dataTypeId: "https://hash.ai/@hash/types/data-type/bytes/v/1", + dataTypeBaseUrl: "https://hash.ai/@hash/types/data-type/bytes/" as BaseUrl, + title: "Bytes", + description: "A unit of information equal to eight bits.", + }, centimeters: { dataTypeId: "https://hash.ai/@hash/types/data-type/centimeters/v/1", dataTypeBaseUrl: @@ -840,6 +926,14 @@ export const systemDataTypes = { description: "A unit of length in the International System of Units (SI), equal to one hundredth of a meter.", }, + currency: { + dataTypeId: "https://hash.ai/@hash/types/data-type/currency/v/1", + dataTypeBaseUrl: + "https://hash.ai/@hash/types/data-type/currency/" as BaseUrl, + title: "Currency", + description: + "A system of money in common use within a specific environment over time, especially for people in a nation state.", + }, date: { dataTypeId: "https://hash.ai/@hash/types/data-type/date/v/1", dataTypeBaseUrl: "https://hash.ai/@hash/types/data-type/date/" as BaseUrl, @@ -855,6 +949,13 @@ export const systemDataTypes = { description: "A reference to a particular date and time, formatted according to RFC 3339.", }, + doi: { + dataTypeId: "https://hash.ai/@hash/types/data-type/doi/v/1", + dataTypeBaseUrl: "https://hash.ai/@hash/types/data-type/doi/" as BaseUrl, + title: "DOI", + description: + "A DOI (Digital Object Identifier), used to identify digital objects such as journal articles or datasets.", + }, email: { dataTypeId: "https://hash.ai/@hash/types/data-type/email/v/1", dataTypeBaseUrl: "https://hash.ai/@hash/types/data-type/email/" as BaseUrl, @@ -862,11 +963,32 @@ export const systemDataTypes = { description: "An identifier for an email box to which messages are delivered.", }, + eur: { + dataTypeId: "https://hash.ai/@hash/types/data-type/eur/v/1", + dataTypeBaseUrl: "https://hash.ai/@hash/types/data-type/eur/" as BaseUrl, + title: "EUR", + description: "An amount denominated in Euros.", + }, + feet: { + dataTypeId: "https://hash.ai/@hash/types/data-type/feet/v/1", + dataTypeBaseUrl: "https://hash.ai/@hash/types/data-type/feet/" as BaseUrl, + title: "Feet", + 
description: + "An imperial unit of length. 3 feet equals 1 yard. Equivalent to 0.3048 meters in the International System of Units (SI).", + }, + frequency: { + dataTypeId: "https://hash.ai/@hash/types/data-type/frequency/v/1", + dataTypeBaseUrl: + "https://hash.ai/@hash/types/data-type/frequency/" as BaseUrl, + title: "Frequency", + description: + "The number of occurrences of a repeating event per unit of time (temporal frequency).", + }, gbp: { dataTypeId: "https://hash.ai/@hash/types/data-type/gbp/v/1", dataTypeBaseUrl: "https://hash.ai/@hash/types/data-type/gbp/" as BaseUrl, title: "GBP", - description: "An amount denominated in British pounds sterling", + description: "An amount denominated in British pounds sterling.", }, gigabytes: { dataTypeId: "https://hash.ai/@hash/types/data-type/gigabytes/v/1", @@ -880,7 +1002,83 @@ export const systemDataTypes = { dataTypeBaseUrl: "https://hash.ai/@hash/types/data-type/gigahertz/" as BaseUrl, title: "Gigahertz", - description: "A unit of frequency equal to one billion hertz.", + description: + "A unit of frequency in the International System of Units (SI), equal to one billion hertz.", + }, + gigawatts: { + dataTypeId: "https://hash.ai/@hash/types/data-type/gigawatts/v/1", + dataTypeBaseUrl: + "https://hash.ai/@hash/types/data-type/gigawatts/" as BaseUrl, + title: "Gigawatts", + description: + "A unit of power in the International System of Units (SI), equal to one billion watts.", + }, + hertz: { + dataTypeId: "https://hash.ai/@hash/types/data-type/hertz/v/1", + dataTypeBaseUrl: "https://hash.ai/@hash/types/data-type/hertz/" as BaseUrl, + title: "Hertz", + description: + "A unit of frequency in the International System of Units (SI), equivalent to one cycle per second.", + }, + "imperialLength(uk)": { + dataTypeId: "https://hash.ai/@hash/types/data-type/imperial-length-uk/v/1", + dataTypeBaseUrl: + "https://hash.ai/@hash/types/data-type/imperial-length-uk/" as BaseUrl, + title: "Imperial Length (UK)", + description: + "A measure of distance in the system of units defined in the British Weights and Measures Acts, in use alongside metric units in the UK and elsewhere.", + }, + "imperialLength(us)": { + dataTypeId: "https://hash.ai/@hash/types/data-type/imperial-length-us/v/1", + dataTypeBaseUrl: + "https://hash.ai/@hash/types/data-type/imperial-length-us/" as BaseUrl, + title: "Imperial Length (US)", + description: + "A measure of distance in the system of units commonly used in the United States, formally known as United States customary units.", + }, + inches: { + dataTypeId: "https://hash.ai/@hash/types/data-type/inches/v/1", + dataTypeBaseUrl: "https://hash.ai/@hash/types/data-type/inches/" as BaseUrl, + title: "Inches", + description: + "An imperial unit of length. 12 inches equals 1 foot. Equivalent to 0.0254 meters in the International System of Units (SI).", + }, + information: { + dataTypeId: "https://hash.ai/@hash/types/data-type/information/v/1", + dataTypeBaseUrl: + "https://hash.ai/@hash/types/data-type/information/" as BaseUrl, + title: "Information", + description: "A measure of information content.", + }, + integer: { + dataTypeId: "https://hash.ai/@hash/types/data-type/integer/v/1", + dataTypeBaseUrl: + "https://hash.ai/@hash/types/data-type/integer/" as BaseUrl, + title: "Integer", + description: + "The number zero (0), a positive natural number (e.g. 1, 2, 3), or the negation of a positive natural number (e.g. 
-1, -2, -3).", + }, + isbn: { + dataTypeId: "https://hash.ai/@hash/types/data-type/isbn/v/1", + dataTypeBaseUrl: "https://hash.ai/@hash/types/data-type/isbn/" as BaseUrl, + title: "ISBN", + description: + "International Standard Book Number: a numeric commercial book identifier that is intended to be unique, issued by an affiliate of the International ISBN Agency.", + }, + kilobytes: { + dataTypeId: "https://hash.ai/@hash/types/data-type/kilobytes/v/1", + dataTypeBaseUrl: + "https://hash.ai/@hash/types/data-type/kilobytes/" as BaseUrl, + title: "Kilobytes", + description: "A unit of information equal to one thousand bytes.", + }, + kilohertz: { + dataTypeId: "https://hash.ai/@hash/types/data-type/kilohertz/v/1", + dataTypeBaseUrl: + "https://hash.ai/@hash/types/data-type/kilohertz/" as BaseUrl, + title: "Kilohertz", + description: + "A unit of frequency in the International System of Units (SI), equal to one thousand hertz.", }, kilometers: { dataTypeId: "https://hash.ai/@hash/types/data-type/kilometers/v/1", @@ -890,6 +1088,43 @@ export const systemDataTypes = { description: "A unit of length in the International System of Units (SI), equal to one thousand meters.", }, + kilowatts: { + dataTypeId: "https://hash.ai/@hash/types/data-type/kilowatts/v/1", + dataTypeBaseUrl: + "https://hash.ai/@hash/types/data-type/kilowatts/" as BaseUrl, + title: "Kilowatts", + description: + "A unit of power in the International System of Units (SI), equal to one thousand watts.", + }, + length: { + dataTypeId: "https://hash.ai/@hash/types/data-type/length/v/1", + dataTypeBaseUrl: "https://hash.ai/@hash/types/data-type/length/" as BaseUrl, + title: "Length", + description: "A measure of distance.", + }, + megabytes: { + dataTypeId: "https://hash.ai/@hash/types/data-type/megabytes/v/1", + dataTypeBaseUrl: + "https://hash.ai/@hash/types/data-type/megabytes/" as BaseUrl, + title: "Megabytes", + description: "A unit of information equal to one million bytes.", + }, + megahertz: { + dataTypeId: "https://hash.ai/@hash/types/data-type/megahertz/v/1", + dataTypeBaseUrl: + "https://hash.ai/@hash/types/data-type/megahertz/" as BaseUrl, + title: "Megahertz", + description: + "A unit of frequency in the International System of Units (SI), equal to one million hertz.", + }, + megawatts: { + dataTypeId: "https://hash.ai/@hash/types/data-type/megawatts/v/1", + dataTypeBaseUrl: + "https://hash.ai/@hash/types/data-type/megawatts/" as BaseUrl, + title: "Megawatts", + description: + "A unit of power in the International System of Units (SI), equal to one million watts.", + }, meters: { dataTypeId: "https://hash.ai/@hash/types/data-type/meters/v/1", dataTypeBaseUrl: "https://hash.ai/@hash/types/data-type/meters/" as BaseUrl, @@ -897,6 +1132,14 @@ export const systemDataTypes = { description: "The base unit of length in the International System of Units (SI).", }, + "metricLength(si)": { + dataTypeId: "https://hash.ai/@hash/types/data-type/metric-length-si/v/1", + dataTypeBaseUrl: + "https://hash.ai/@hash/types/data-type/metric-length-si/" as BaseUrl, + title: "Metric Length (SI)", + description: + "A measure of distance in the International System of Units (SI), the international standard for decimal-based measurements.", + }, miles: { dataTypeId: "https://hash.ai/@hash/types/data-type/miles/v/1", dataTypeBaseUrl: "https://hash.ai/@hash/types/data-type/miles/" as BaseUrl, @@ -912,6 +1155,19 @@ export const systemDataTypes = { description: "A unit of length in the International System of Units (SI), equal to one thousandth of a 
meter.", }, + power: { + dataTypeId: "https://hash.ai/@hash/types/data-type/power/v/1", + dataTypeBaseUrl: "https://hash.ai/@hash/types/data-type/power/" as BaseUrl, + title: "Power", + description: "The amount of energy transferred or converted per unit time.", + }, + terabytes: { + dataTypeId: "https://hash.ai/@hash/types/data-type/terabytes/v/1", + dataTypeBaseUrl: + "https://hash.ai/@hash/types/data-type/terabytes/" as BaseUrl, + title: "Terabytes", + description: "A unit of information equal to one trillion bytes.", + }, time: { dataTypeId: "https://hash.ai/@hash/types/data-type/time/v/1", dataTypeBaseUrl: "https://hash.ai/@hash/types/data-type/time/" as BaseUrl, @@ -929,14 +1185,27 @@ export const systemDataTypes = { dataTypeId: "https://hash.ai/@hash/types/data-type/usd/v/1", dataTypeBaseUrl: "https://hash.ai/@hash/types/data-type/usd/" as BaseUrl, title: "USD", - description: "An amount denominated in US Dollars", + description: "An amount denominated in US Dollars.", }, watts: { dataTypeId: "https://hash.ai/@hash/types/data-type/watts/v/1", dataTypeBaseUrl: "https://hash.ai/@hash/types/data-type/watts/" as BaseUrl, title: "Watts", description: - "A unit of power in the International System of Units (SI) equal to one joule per second.", + "The unit of power or radiant flux in the International System of Units (SI) – the rate at which work is done or energy is transferred. Equal to one joule per second.", + }, + yards: { + dataTypeId: "https://hash.ai/@hash/types/data-type/yards/v/1", + dataTypeBaseUrl: "https://hash.ai/@hash/types/data-type/yards/" as BaseUrl, + title: "Yards", + description: + "An imperial unit of length. 1,760 yards equals 1 mile. Equivalent to 0.9144 meters in the International System of Units (SI).", + }, + year: { + dataTypeId: "https://hash.ai/@hash/types/data-type/year/v/1", + dataTypeBaseUrl: "https://hash.ai/@hash/types/data-type/year/" as BaseUrl, + title: "Year", + description: "A year in the Gregorian calendar.", }, } as const satisfies Record< string, @@ -1588,22 +1857,6 @@ export const blockProtocolDataTypes = { title: "Boolean", description: "A True or False value", }, - emptyList: { - dataTypeId: - "https://blockprotocol.org/@blockprotocol/types/data-type/list/v/1", - dataTypeBaseUrl: - "https://blockprotocol.org/@blockprotocol/types/data-type/list/" as BaseUrl, - title: "List", - description: "An ordered list of values", - }, - null: { - dataTypeId: - "https://blockprotocol.org/@blockprotocol/types/data-type/null/v/1", - dataTypeBaseUrl: - "https://blockprotocol.org/@blockprotocol/types/data-type/null/" as BaseUrl, - title: "Null", - description: "A placeholder value representing 'nothing'", - }, number: { dataTypeId: "https://blockprotocol.org/@blockprotocol/types/data-type/number/v/1", @@ -1628,6 +1881,15 @@ export const blockProtocolDataTypes = { title: "Text", description: "An ordered sequence of characters", }, + value: { + dataTypeId: + "https://blockprotocol.org/@blockprotocol/types/data-type/value/v/1", + dataTypeBaseUrl: + "https://blockprotocol.org/@blockprotocol/types/data-type/value/" as BaseUrl, + title: "Value", + description: + "A piece of data that can be used to convey information about an attribute, quality or state of something.", + }, } as const satisfies Record< string, { diff --git a/libs/@local/hash-isomorphic-utils/src/system-types/academicpaper.ts b/libs/@local/hash-isomorphic-utils/src/system-types/academicpaper.ts new file mode 100644 index 00000000000..b022ea86330 --- /dev/null +++ 
b/libs/@local/hash-isomorphic-utils/src/system-types/academicpaper.ts @@ -0,0 +1,242 @@ +/** + * This file was automatically generated – do not edit it. + */ + +import type { + ObjectMetadata, + PropertyProvenance, +} from "@local/hash-graph-client"; +import type { Confidence } from "@local/hash-graph-types/entity"; + +import type { + AffiliatedWith, + AffiliatedWithOutgoingLinkAndTarget, + AffiliatedWithOutgoingLinksByLinkEntityTypeId, + AffiliatedWithProperties, + AffiliatedWithPropertiesWithMetadata, + AuthoredBy, + AuthoredByOutgoingLinkAndTarget, + AuthoredByOutgoingLinksByLinkEntityTypeId, + AuthoredByProperties, + AuthoredByPropertiesWithMetadata, + DescriptionPropertyValue, + DescriptionPropertyValueWithMetadata, + Doc, + DocAuthoredByLink, + DocOutgoingLinkAndTarget, + DocOutgoingLinksByLinkEntityTypeId, + DocProperties, + DocPropertiesWithMetadata, + EmailPropertyValue, + EmailPropertyValueWithMetadata, + Institution, + InstitutionOutgoingLinkAndTarget, + InstitutionOutgoingLinksByLinkEntityTypeId, + InstitutionProperties, + InstitutionPropertiesWithMetadata, + IntegerDataType, + IntegerDataTypeWithMetadata, + Link, + LinkOutgoingLinkAndTarget, + LinkOutgoingLinksByLinkEntityTypeId, + LinkProperties, + LinkPropertiesWithMetadata, + NamePropertyValue, + NamePropertyValueWithMetadata, + NumberDataType, + NumberDataTypeWithMetadata, + NumberOfPagesPropertyValue, + NumberOfPagesPropertyValueWithMetadata, + Person, + PersonAffiliatedWithLink, + PersonOutgoingLinkAndTarget, + PersonOutgoingLinksByLinkEntityTypeId, + PersonProperties, + PersonPropertiesWithMetadata, + PublicationYearPropertyValue, + PublicationYearPropertyValueWithMetadata, + SummaryPropertyValue, + SummaryPropertyValueWithMetadata, + TextDataType, + TextDataTypeWithMetadata, + TitlePropertyValue, + TitlePropertyValueWithMetadata, + YearDataType, + YearDataTypeWithMetadata, +} from "./shared.js"; + +export type { + AffiliatedWith, + AffiliatedWithOutgoingLinkAndTarget, + AffiliatedWithOutgoingLinksByLinkEntityTypeId, + AffiliatedWithProperties, + AffiliatedWithPropertiesWithMetadata, + AuthoredBy, + AuthoredByOutgoingLinkAndTarget, + AuthoredByOutgoingLinksByLinkEntityTypeId, + AuthoredByProperties, + AuthoredByPropertiesWithMetadata, + DescriptionPropertyValue, + DescriptionPropertyValueWithMetadata, + Doc, + DocAuthoredByLink, + DocOutgoingLinkAndTarget, + DocOutgoingLinksByLinkEntityTypeId, + DocProperties, + DocPropertiesWithMetadata, + EmailPropertyValue, + EmailPropertyValueWithMetadata, + Institution, + InstitutionOutgoingLinkAndTarget, + InstitutionOutgoingLinksByLinkEntityTypeId, + InstitutionProperties, + InstitutionPropertiesWithMetadata, + IntegerDataType, + IntegerDataTypeWithMetadata, + Link, + LinkOutgoingLinkAndTarget, + LinkOutgoingLinksByLinkEntityTypeId, + LinkProperties, + LinkPropertiesWithMetadata, + NamePropertyValue, + NamePropertyValueWithMetadata, + NumberDataType, + NumberDataTypeWithMetadata, + NumberOfPagesPropertyValue, + NumberOfPagesPropertyValueWithMetadata, + Person, + PersonAffiliatedWithLink, + PersonOutgoingLinkAndTarget, + PersonOutgoingLinksByLinkEntityTypeId, + PersonProperties, + PersonPropertiesWithMetadata, + PublicationYearPropertyValue, + PublicationYearPropertyValueWithMetadata, + SummaryPropertyValue, + SummaryPropertyValueWithMetadata, + TextDataType, + TextDataTypeWithMetadata, + TitlePropertyValue, + TitlePropertyValueWithMetadata, + YearDataType, + YearDataTypeWithMetadata, +}; + +/** + * A paper describing academic research + */ +export type AcademicPaper = { + 
entityTypeIds: ["https://hash.ai/@hash/types/entity-type/academic-paper/v/1"]; + properties: AcademicPaperProperties; + propertiesWithMetadata: AcademicPaperPropertiesWithMetadata; +}; + +export type AcademicPaperOutgoingLinkAndTarget = never; + +export type AcademicPaperOutgoingLinksByLinkEntityTypeId = {}; + +/** + * A paper describing academic research + */ +export type AcademicPaperProperties = AcademicPaperProperties1 & + AcademicPaperProperties2; +export type AcademicPaperProperties1 = DocProperties; + +export type AcademicPaperProperties2 = { + "https://hash.ai/@hash/types/property-type/doi-link/"?: DOILinkPropertyValue; + "https://hash.ai/@hash/types/property-type/doi/"?: DOIPropertyValue; + "https://hash.ai/@hash/types/property-type/experimental-subject/"?: ExperimentalSubjectPropertyValue; + "https://hash.ai/@hash/types/property-type/finding/"?: FindingPropertyValue; + "https://hash.ai/@hash/types/property-type/methodology/"?: MethodologyPropertyValue; + "https://hash.ai/@hash/types/property-type/summary/": SummaryPropertyValue; + "https://hash.ai/@hash/types/property-type/title/": TitlePropertyValue; +}; + +export type AcademicPaperPropertiesWithMetadata = + AcademicPaperPropertiesWithMetadata1 & AcademicPaperPropertiesWithMetadata2; +export type AcademicPaperPropertiesWithMetadata1 = DocPropertiesWithMetadata; + +export type AcademicPaperPropertiesWithMetadata2 = { + metadata?: ObjectMetadata; + value: { + "https://hash.ai/@hash/types/property-type/doi-link/"?: DOILinkPropertyValueWithMetadata; + "https://hash.ai/@hash/types/property-type/doi/"?: DOIPropertyValueWithMetadata; + "https://hash.ai/@hash/types/property-type/experimental-subject/"?: ExperimentalSubjectPropertyValueWithMetadata; + "https://hash.ai/@hash/types/property-type/finding/"?: FindingPropertyValueWithMetadata; + "https://hash.ai/@hash/types/property-type/methodology/"?: MethodologyPropertyValueWithMetadata; + "https://hash.ai/@hash/types/property-type/summary/": SummaryPropertyValueWithMetadata; + "https://hash.ai/@hash/types/property-type/title/": TitlePropertyValueWithMetadata; + }; +}; + +/** + * A DOI (Digital Object Identifier), used to identify digital objects such as journal articles or datasets. + */ +export type DOIDataType = DOIDataType1; +export type DOIDataType1 = TextDataType; + +export type DOIDataType2 = string; + +export type DOIDataTypeWithMetadata = { + value: DOIDataType; + metadata: DOIDataTypeMetadata; +}; +export type DOIDataTypeMetadata = { + provenance?: PropertyProvenance; + confidence?: Confidence; + dataTypeId: "https://hash.ai/@hash/types/data-type/doi/v/1"; +}; + +/** + * A permanent link for a digital object, using its Digital Object Identifier (DOI), which resolves to a webpage describing it + */ +export type DOILinkPropertyValue = URIDataType; + +export type DOILinkPropertyValueWithMetadata = URIDataTypeWithMetadata; + +/** + * The Digital Object Identifier (DOI) of an object + */ +export type DOIPropertyValue = DOIDataType; + +export type DOIPropertyValueWithMetadata = DOIDataTypeWithMetadata; + +/** + * The type of participant or observed entity in an experiment or study. + */ +export type ExperimentalSubjectPropertyValue = TextDataType; + +export type ExperimentalSubjectPropertyValueWithMetadata = + TextDataTypeWithMetadata; + +/** + * The results or conclusion of an experiment, research project, investigation, etc. 
+ */ +export type FindingPropertyValue = TextDataType; + +export type FindingPropertyValueWithMetadata = TextDataTypeWithMetadata; + +/** + * The procedure via which something was produced, analyzed, or otherwise approached. + */ +export type MethodologyPropertyValue = TextDataType; + +export type MethodologyPropertyValueWithMetadata = TextDataTypeWithMetadata; + +/** + * A unique identifier for a resource (e.g. a URL, or URN). + */ +export type URIDataType = URIDataType1; +export type URIDataType1 = TextDataType; + +export type URIDataType2 = string; + +export type URIDataTypeWithMetadata = { + value: URIDataType; + metadata: URIDataTypeMetadata; +}; +export type URIDataTypeMetadata = { + provenance?: PropertyProvenance; + confidence?: Confidence; + dataTypeId: "https://hash.ai/@hash/types/data-type/uri/v/1"; +}; diff --git a/libs/@local/hash-isomorphic-utils/src/system-types/book.ts b/libs/@local/hash-isomorphic-utils/src/system-types/book.ts new file mode 100644 index 00000000000..f71f56e1cfa --- /dev/null +++ b/libs/@local/hash-isomorphic-utils/src/system-types/book.ts @@ -0,0 +1,182 @@ +/** + * This file was automatically generated – do not edit it. + */ + +import type { + ObjectMetadata, + PropertyProvenance, +} from "@local/hash-graph-client"; +import type { Confidence } from "@local/hash-graph-types/entity"; + +import type { + AffiliatedWith, + AffiliatedWithOutgoingLinkAndTarget, + AffiliatedWithOutgoingLinksByLinkEntityTypeId, + AffiliatedWithProperties, + AffiliatedWithPropertiesWithMetadata, + AuthoredBy, + AuthoredByOutgoingLinkAndTarget, + AuthoredByOutgoingLinksByLinkEntityTypeId, + AuthoredByProperties, + AuthoredByPropertiesWithMetadata, + DescriptionPropertyValue, + DescriptionPropertyValueWithMetadata, + Doc, + DocAuthoredByLink, + DocOutgoingLinkAndTarget, + DocOutgoingLinksByLinkEntityTypeId, + DocProperties, + DocPropertiesWithMetadata, + EmailPropertyValue, + EmailPropertyValueWithMetadata, + Institution, + InstitutionOutgoingLinkAndTarget, + InstitutionOutgoingLinksByLinkEntityTypeId, + InstitutionProperties, + InstitutionPropertiesWithMetadata, + IntegerDataType, + IntegerDataTypeWithMetadata, + Link, + LinkOutgoingLinkAndTarget, + LinkOutgoingLinksByLinkEntityTypeId, + LinkProperties, + LinkPropertiesWithMetadata, + NamePropertyValue, + NamePropertyValueWithMetadata, + NumberDataType, + NumberDataTypeWithMetadata, + NumberOfPagesPropertyValue, + NumberOfPagesPropertyValueWithMetadata, + Person, + PersonAffiliatedWithLink, + PersonOutgoingLinkAndTarget, + PersonOutgoingLinksByLinkEntityTypeId, + PersonProperties, + PersonPropertiesWithMetadata, + PublicationYearPropertyValue, + PublicationYearPropertyValueWithMetadata, + SummaryPropertyValue, + SummaryPropertyValueWithMetadata, + TextDataType, + TextDataTypeWithMetadata, + TitlePropertyValue, + TitlePropertyValueWithMetadata, + YearDataType, + YearDataTypeWithMetadata, +} from "./shared.js"; + +export type { + AffiliatedWith, + AffiliatedWithOutgoingLinkAndTarget, + AffiliatedWithOutgoingLinksByLinkEntityTypeId, + AffiliatedWithProperties, + AffiliatedWithPropertiesWithMetadata, + AuthoredBy, + AuthoredByOutgoingLinkAndTarget, + AuthoredByOutgoingLinksByLinkEntityTypeId, + AuthoredByProperties, + AuthoredByPropertiesWithMetadata, + DescriptionPropertyValue, + DescriptionPropertyValueWithMetadata, + Doc, + DocAuthoredByLink, + DocOutgoingLinkAndTarget, + DocOutgoingLinksByLinkEntityTypeId, + DocProperties, + DocPropertiesWithMetadata, + EmailPropertyValue, + EmailPropertyValueWithMetadata, + Institution, + 
InstitutionOutgoingLinkAndTarget, + InstitutionOutgoingLinksByLinkEntityTypeId, + InstitutionProperties, + InstitutionPropertiesWithMetadata, + IntegerDataType, + IntegerDataTypeWithMetadata, + Link, + LinkOutgoingLinkAndTarget, + LinkOutgoingLinksByLinkEntityTypeId, + LinkProperties, + LinkPropertiesWithMetadata, + NamePropertyValue, + NamePropertyValueWithMetadata, + NumberDataType, + NumberDataTypeWithMetadata, + NumberOfPagesPropertyValue, + NumberOfPagesPropertyValueWithMetadata, + Person, + PersonAffiliatedWithLink, + PersonOutgoingLinkAndTarget, + PersonOutgoingLinksByLinkEntityTypeId, + PersonProperties, + PersonPropertiesWithMetadata, + PublicationYearPropertyValue, + PublicationYearPropertyValueWithMetadata, + SummaryPropertyValue, + SummaryPropertyValueWithMetadata, + TextDataType, + TextDataTypeWithMetadata, + TitlePropertyValue, + TitlePropertyValueWithMetadata, + YearDataType, + YearDataTypeWithMetadata, +}; + +/** + * A written work, typically longer than an article, often published in print form. + */ +export type Book = { + entityTypeIds: ["https://hash.ai/@hash/types/entity-type/book/v/1"]; + properties: BookProperties; + propertiesWithMetadata: BookPropertiesWithMetadata; +}; + +export type BookOutgoingLinkAndTarget = never; + +export type BookOutgoingLinksByLinkEntityTypeId = {}; + +/** + * A written work, typically longer than an article, often published in print form. + */ +export type BookProperties = BookProperties1 & BookProperties2; +export type BookProperties1 = DocProperties; + +export type BookProperties2 = { + "https://hash.ai/@hash/types/property-type/isbn/"?: ISBNPropertyValue; +}; + +export type BookPropertiesWithMetadata = BookPropertiesWithMetadata1 & + BookPropertiesWithMetadata2; +export type BookPropertiesWithMetadata1 = DocPropertiesWithMetadata; + +export type BookPropertiesWithMetadata2 = { + metadata?: ObjectMetadata; + value: { + "https://hash.ai/@hash/types/property-type/isbn/"?: ISBNPropertyValueWithMetadata; + }; +}; + +/** + * International Standard Book Number: a numeric commercial book identifier that is intended to be unique, issued by an affiliate of the International ISBN Agency. + */ +export type ISBNDataType = ISBNDataType1; +export type ISBNDataType1 = TextDataType; + +export type ISBNDataType2 = string; + +export type ISBNDataTypeWithMetadata = { + value: ISBNDataType; + metadata: ISBNDataTypeMetadata; +}; +export type ISBNDataTypeMetadata = { + provenance?: PropertyProvenance; + confidence?: Confidence; + dataTypeId: "https://hash.ai/@hash/types/data-type/isbn/v/1"; +}; + +/** + * The International Standard Book Number (ISBN) of a book + */ +export type ISBNPropertyValue = ISBNDataType; + +export type ISBNPropertyValueWithMetadata = ISBNDataTypeWithMetadata; diff --git a/libs/@local/hash-isomorphic-utils/src/system-types/doc.ts b/libs/@local/hash-isomorphic-utils/src/system-types/doc.ts new file mode 100644 index 00000000000..b534b926d2a --- /dev/null +++ b/libs/@local/hash-isomorphic-utils/src/system-types/doc.ts @@ -0,0 +1,117 @@ +/** + * This file was automatically generated – do not edit it. 
+ */ + +import type { + AffiliatedWith, + AffiliatedWithOutgoingLinkAndTarget, + AffiliatedWithOutgoingLinksByLinkEntityTypeId, + AffiliatedWithProperties, + AffiliatedWithPropertiesWithMetadata, + AuthoredBy, + AuthoredByOutgoingLinkAndTarget, + AuthoredByOutgoingLinksByLinkEntityTypeId, + AuthoredByProperties, + AuthoredByPropertiesWithMetadata, + DescriptionPropertyValue, + DescriptionPropertyValueWithMetadata, + Doc, + DocAuthoredByLink, + DocOutgoingLinkAndTarget, + DocOutgoingLinksByLinkEntityTypeId, + DocProperties, + DocPropertiesWithMetadata, + EmailPropertyValue, + EmailPropertyValueWithMetadata, + Institution, + InstitutionOutgoingLinkAndTarget, + InstitutionOutgoingLinksByLinkEntityTypeId, + InstitutionProperties, + InstitutionPropertiesWithMetadata, + IntegerDataType, + IntegerDataTypeWithMetadata, + Link, + LinkOutgoingLinkAndTarget, + LinkOutgoingLinksByLinkEntityTypeId, + LinkProperties, + LinkPropertiesWithMetadata, + NamePropertyValue, + NamePropertyValueWithMetadata, + NumberDataType, + NumberDataTypeWithMetadata, + NumberOfPagesPropertyValue, + NumberOfPagesPropertyValueWithMetadata, + Person, + PersonAffiliatedWithLink, + PersonOutgoingLinkAndTarget, + PersonOutgoingLinksByLinkEntityTypeId, + PersonProperties, + PersonPropertiesWithMetadata, + PublicationYearPropertyValue, + PublicationYearPropertyValueWithMetadata, + SummaryPropertyValue, + SummaryPropertyValueWithMetadata, + TextDataType, + TextDataTypeWithMetadata, + TitlePropertyValue, + TitlePropertyValueWithMetadata, + YearDataType, + YearDataTypeWithMetadata, +} from "./shared.js"; + +export type { + AffiliatedWith, + AffiliatedWithOutgoingLinkAndTarget, + AffiliatedWithOutgoingLinksByLinkEntityTypeId, + AffiliatedWithProperties, + AffiliatedWithPropertiesWithMetadata, + AuthoredBy, + AuthoredByOutgoingLinkAndTarget, + AuthoredByOutgoingLinksByLinkEntityTypeId, + AuthoredByProperties, + AuthoredByPropertiesWithMetadata, + DescriptionPropertyValue, + DescriptionPropertyValueWithMetadata, + Doc, + DocAuthoredByLink, + DocOutgoingLinkAndTarget, + DocOutgoingLinksByLinkEntityTypeId, + DocProperties, + DocPropertiesWithMetadata, + EmailPropertyValue, + EmailPropertyValueWithMetadata, + Institution, + InstitutionOutgoingLinkAndTarget, + InstitutionOutgoingLinksByLinkEntityTypeId, + InstitutionProperties, + InstitutionPropertiesWithMetadata, + IntegerDataType, + IntegerDataTypeWithMetadata, + Link, + LinkOutgoingLinkAndTarget, + LinkOutgoingLinksByLinkEntityTypeId, + LinkProperties, + LinkPropertiesWithMetadata, + NamePropertyValue, + NamePropertyValueWithMetadata, + NumberDataType, + NumberDataTypeWithMetadata, + NumberOfPagesPropertyValue, + NumberOfPagesPropertyValueWithMetadata, + Person, + PersonAffiliatedWithLink, + PersonOutgoingLinkAndTarget, + PersonOutgoingLinksByLinkEntityTypeId, + PersonProperties, + PersonPropertiesWithMetadata, + PublicationYearPropertyValue, + PublicationYearPropertyValueWithMetadata, + SummaryPropertyValue, + SummaryPropertyValueWithMetadata, + TextDataType, + TextDataTypeWithMetadata, + TitlePropertyValue, + TitlePropertyValueWithMetadata, + YearDataType, + YearDataTypeWithMetadata, +}; diff --git a/libs/@local/hash-isomorphic-utils/src/system-types/institution.ts b/libs/@local/hash-isomorphic-utils/src/system-types/institution.ts new file mode 100644 index 00000000000..352eb9d3ab1 --- /dev/null +++ b/libs/@local/hash-isomorphic-utils/src/system-types/institution.ts @@ -0,0 +1,31 @@ +/** + * This file was automatically generated – do not edit it. 
+ */ + +import type { + DescriptionPropertyValue, + DescriptionPropertyValueWithMetadata, + Institution, + InstitutionOutgoingLinkAndTarget, + InstitutionOutgoingLinksByLinkEntityTypeId, + InstitutionProperties, + InstitutionPropertiesWithMetadata, + NamePropertyValue, + NamePropertyValueWithMetadata, + TextDataType, + TextDataTypeWithMetadata, +} from "./shared.js"; + +export type { + DescriptionPropertyValue, + DescriptionPropertyValueWithMetadata, + Institution, + InstitutionOutgoingLinkAndTarget, + InstitutionOutgoingLinksByLinkEntityTypeId, + InstitutionProperties, + InstitutionPropertiesWithMetadata, + NamePropertyValue, + NamePropertyValueWithMetadata, + TextDataType, + TextDataTypeWithMetadata, +}; diff --git a/libs/@local/hash-isomorphic-utils/src/system-types/person.ts b/libs/@local/hash-isomorphic-utils/src/system-types/person.ts new file mode 100644 index 00000000000..5d5f2e66cd3 --- /dev/null +++ b/libs/@local/hash-isomorphic-utils/src/system-types/person.ts @@ -0,0 +1,67 @@ +/** + * This file was automatically generated – do not edit it. + */ + +import type { + AffiliatedWith, + AffiliatedWithOutgoingLinkAndTarget, + AffiliatedWithOutgoingLinksByLinkEntityTypeId, + AffiliatedWithProperties, + AffiliatedWithPropertiesWithMetadata, + DescriptionPropertyValue, + DescriptionPropertyValueWithMetadata, + EmailPropertyValue, + EmailPropertyValueWithMetadata, + Institution, + InstitutionOutgoingLinkAndTarget, + InstitutionOutgoingLinksByLinkEntityTypeId, + InstitutionProperties, + InstitutionPropertiesWithMetadata, + Link, + LinkOutgoingLinkAndTarget, + LinkOutgoingLinksByLinkEntityTypeId, + LinkProperties, + LinkPropertiesWithMetadata, + NamePropertyValue, + NamePropertyValueWithMetadata, + Person, + PersonAffiliatedWithLink, + PersonOutgoingLinkAndTarget, + PersonOutgoingLinksByLinkEntityTypeId, + PersonProperties, + PersonPropertiesWithMetadata, + TextDataType, + TextDataTypeWithMetadata, +} from "./shared.js"; + +export type { + AffiliatedWith, + AffiliatedWithOutgoingLinkAndTarget, + AffiliatedWithOutgoingLinksByLinkEntityTypeId, + AffiliatedWithProperties, + AffiliatedWithPropertiesWithMetadata, + DescriptionPropertyValue, + DescriptionPropertyValueWithMetadata, + EmailPropertyValue, + EmailPropertyValueWithMetadata, + Institution, + InstitutionOutgoingLinkAndTarget, + InstitutionOutgoingLinksByLinkEntityTypeId, + InstitutionProperties, + InstitutionPropertiesWithMetadata, + Link, + LinkOutgoingLinkAndTarget, + LinkOutgoingLinksByLinkEntityTypeId, + LinkProperties, + LinkPropertiesWithMetadata, + NamePropertyValue, + NamePropertyValueWithMetadata, + Person, + PersonAffiliatedWithLink, + PersonOutgoingLinkAndTarget, + PersonOutgoingLinksByLinkEntityTypeId, + PersonProperties, + PersonPropertiesWithMetadata, + TextDataType, + TextDataTypeWithMetadata, +}; diff --git a/libs/@local/hash-isomorphic-utils/src/system-types/shared.ts b/libs/@local/hash-isomorphic-utils/src/system-types/shared.ts index f0f1c477c07..3afc16fad4e 100644 --- a/libs/@local/hash-isomorphic-utils/src/system-types/shared.ts +++ b/libs/@local/hash-isomorphic-utils/src/system-types/shared.ts @@ -37,6 +37,39 @@ export type ActorPropertiesWithMetadata = { }; }; +/** + * Something that something is affiliated with. 
+ */ +export type AffiliatedWith = { + entityTypeIds: [ + "https://hash.ai/@hash/types/entity-type/affiliated-with/v/1", + ]; + properties: AffiliatedWithProperties; + propertiesWithMetadata: AffiliatedWithPropertiesWithMetadata; +}; + +export type AffiliatedWithOutgoingLinkAndTarget = never; + +export type AffiliatedWithOutgoingLinksByLinkEntityTypeId = {}; + +/** + * Something that something is affiliated with. + */ +export type AffiliatedWithProperties = AffiliatedWithProperties1 & + AffiliatedWithProperties2; +export type AffiliatedWithProperties1 = LinkProperties; + +export type AffiliatedWithProperties2 = {}; + +export type AffiliatedWithPropertiesWithMetadata = + AffiliatedWithPropertiesWithMetadata1 & AffiliatedWithPropertiesWithMetadata2; +export type AffiliatedWithPropertiesWithMetadata1 = LinkPropertiesWithMetadata; + +export type AffiliatedWithPropertiesWithMetadata2 = { + metadata?: ObjectMetadata; + value: {}; +}; + /** * A user or other entity's preferences for how an application should behave or appear */ @@ -286,7 +319,10 @@ export type ConnectionSourceNamePropertyValueWithMetadata = /** * A reference to a particular date and time, formatted according to RFC 3339. */ -export type DateTimeDataType = string; +export type DateTimeDataType = DateTimeDataType1; +export type DateTimeDataType1 = TextDataType; + +export type DateTimeDataType2 = string; export type DateTimeDataTypeWithMetadata = { value: DateTimeDataType; @@ -319,6 +355,43 @@ export type DisplayNamePropertyValue = TextDataType; export type DisplayNamePropertyValueWithMetadata = TextDataTypeWithMetadata; +/** + * A written work, such as a book or article. + */ +export type Doc = { + entityTypeIds: ["https://hash.ai/@hash/types/entity-type/doc/v/1"]; + properties: DocProperties; + propertiesWithMetadata: DocPropertiesWithMetadata; +}; + +export type DocAuthoredByLink = { linkEntity: AuthoredBy; rightEntity: Person }; + +export type DocOutgoingLinkAndTarget = DocAuthoredByLink; + +export type DocOutgoingLinksByLinkEntityTypeId = { + "https://hash.ai/@hash/types/entity-type/authored-by/v/1": DocAuthoredByLink; +}; + +/** + * A written work, such as a book or article. + */ +export type DocProperties = { + "https://hash.ai/@hash/types/property-type/number-of-pages/"?: NumberOfPagesPropertyValue; + "https://hash.ai/@hash/types/property-type/publication-year/"?: PublicationYearPropertyValue; + "https://hash.ai/@hash/types/property-type/summary/"?: SummaryPropertyValue; + "https://hash.ai/@hash/types/property-type/title/": TitlePropertyValue; +}; + +export type DocPropertiesWithMetadata = { + metadata?: ObjectMetadata; + value: { + "https://hash.ai/@hash/types/property-type/number-of-pages/"?: NumberOfPagesPropertyValueWithMetadata; + "https://hash.ai/@hash/types/property-type/publication-year/"?: PublicationYearPropertyValueWithMetadata; + "https://hash.ai/@hash/types/property-type/summary/"?: SummaryPropertyValueWithMetadata; + "https://hash.ai/@hash/types/property-type/title/": TitlePropertyValueWithMetadata; + }; +}; + /** * A document file. */ @@ -914,6 +987,53 @@ export type InputUnitCostPropertyValue = NumberDataType; export type InputUnitCostPropertyValueWithMetadata = NumberDataTypeWithMetadata; +/** + * An organization dedicated to a specific purpose, such as education, research, or public service, and structured with formal systems of governance and operation. 
+ */ +export type Institution = { + entityTypeIds: ["https://hash.ai/@hash/types/entity-type/institution/v/1"]; + properties: InstitutionProperties; + propertiesWithMetadata: InstitutionPropertiesWithMetadata; +}; + +export type InstitutionOutgoingLinkAndTarget = never; + +export type InstitutionOutgoingLinksByLinkEntityTypeId = {}; + +/** + * An organization dedicated to a specific purpose, such as education, research, or public service, and structured with formal systems of governance and operation. + */ +export type InstitutionProperties = { + "https://blockprotocol.org/@blockprotocol/types/property-type/description/"?: DescriptionPropertyValue; + "https://blockprotocol.org/@blockprotocol/types/property-type/name/": NamePropertyValue; +}; + +export type InstitutionPropertiesWithMetadata = { + metadata?: ObjectMetadata; + value: { + "https://blockprotocol.org/@blockprotocol/types/property-type/description/"?: DescriptionPropertyValueWithMetadata; + "https://blockprotocol.org/@blockprotocol/types/property-type/name/": NamePropertyValueWithMetadata; + }; +}; + +/** + * The number zero (0), a positive natural number (e.g. 1, 2, 3), or the negation of a positive natural number (e.g. -1, -2, -3). + */ +export type IntegerDataType = IntegerDataType1; +export type IntegerDataType1 = NumberDataType; + +export type IntegerDataType2 = number; + +export type IntegerDataTypeWithMetadata = { + value: IntegerDataType; + metadata: IntegerDataTypeMetadata; +}; +export type IntegerDataTypeMetadata = { + provenance?: PropertyProvenance; + confidence?: Confidence; + dataTypeId: "https://hash.ai/@hash/types/data-type/integer/v/1"; +}; + /** * Something that someone or something is a member of. */ @@ -1050,6 +1170,13 @@ export type NumberDataTypeMetadata = { dataTypeId: "https://blockprotocol.org/@blockprotocol/types/data-type/number/v/1"; }; +/** + * The total number of pages something has. + */ +export type NumberOfPagesPropertyValue = NumberDataType; + +export type NumberOfPagesPropertyValueWithMetadata = NumberDataTypeWithMetadata; + /** * An opaque, untyped JSON object */ @@ -1321,6 +1448,47 @@ export type PagePropertiesWithMetadata2 = { }; }; +/** + * A human being + */ +export type Person = { + entityTypeIds: ["https://hash.ai/@hash/types/entity-type/person/v/1"]; + properties: PersonProperties; + propertiesWithMetadata: PersonPropertiesWithMetadata; +}; + +export type PersonAffiliatedWithLink = { + linkEntity: AffiliatedWith; + rightEntity: Institution; +}; + +export type PersonOutgoingLinkAndTarget = PersonAffiliatedWithLink; + +export type PersonOutgoingLinksByLinkEntityTypeId = { + "https://hash.ai/@hash/types/entity-type/affiliated-with/v/1": PersonAffiliatedWithLink; +}; + +/** + * A human being + */ +export type PersonProperties = { + "https://blockprotocol.org/@blockprotocol/types/property-type/description/"?: DescriptionPropertyValue; + "https://blockprotocol.org/@blockprotocol/types/property-type/name/": NamePropertyValue; + "https://hash.ai/@hash/types/property-type/email/"?: EmailPropertyValue[]; +}; + +export type PersonPropertiesWithMetadata = { + metadata?: ObjectMetadata; + value: { + "https://blockprotocol.org/@blockprotocol/types/property-type/description/"?: DescriptionPropertyValueWithMetadata; + "https://blockprotocol.org/@blockprotocol/types/property-type/name/": NamePropertyValueWithMetadata; + "https://hash.ai/@hash/types/property-type/email/"?: { + value: EmailPropertyValueWithMetadata[]; + metadata?: ArrayMetadata; + }; + }; +}; + /** * The base URL of a pinned entity type. 
*/ @@ -1422,6 +1590,13 @@ export type ProfileURLPropertyValue = TextDataType; export type ProfileURLPropertyValueWithMetadata = TextDataTypeWithMetadata; +/** + * The year in which something was first published. + */ +export type PublicationYearPropertyValue = YearDataType; + +export type PublicationYearPropertyValueWithMetadata = YearDataTypeWithMetadata; + /** * The timestamp of when something was read. */ @@ -1842,3 +2017,21 @@ export type VaultPathPropertyValueWithMetadata = TextDataTypeWithMetadata; export type WebsiteURLPropertyValue = TextDataType; export type WebsiteURLPropertyValueWithMetadata = TextDataTypeWithMetadata; + +/** + * A year in the Gregorian calendar. + */ +export type YearDataType = YearDataType1; +export type YearDataType1 = IntegerDataType; + +export type YearDataType2 = number; + +export type YearDataTypeWithMetadata = { + value: YearDataType; + metadata: YearDataTypeMetadata; +}; +export type YearDataTypeMetadata = { + provenance?: PropertyProvenance; + confidence?: Confidence; + dataTypeId: "https://hash.ai/@hash/types/data-type/year/v/1"; +}; diff --git a/yarn.lock b/yarn.lock index e11970df48b..9803c9913ea 100644 --- a/yarn.lock +++ b/yarn.lock @@ -4355,6 +4355,52 @@ canvas-hypertxt "^1.0.3" react-number-format "^5.0.0" +"@google-cloud/paginator@^5.0.0": + version "5.0.2" + resolved "https://registry.yarnpkg.com/@google-cloud/paginator/-/paginator-5.0.2.tgz#86ad773266ce9f3b82955a8f75e22cd012ccc889" + integrity sha512-DJS3s0OVH4zFDB1PzjxAsHqJT6sKVbRwwML0ZBP9PbU7Yebtu/7SWMRzvO2J3nUi9pRNITCfu4LJeooM2w4pjg== + dependencies: + arrify "^2.0.0" + extend "^3.0.2" + +"@google-cloud/projectify@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@google-cloud/projectify/-/projectify-4.0.0.tgz#d600e0433daf51b88c1fa95ac7f02e38e80a07be" + integrity sha512-MmaX6HeSvyPbWGwFq7mXdo0uQZLGBYCwziiLIGq5JVX+/bdI3SAq6bP98trV5eTWfLuvsMcIC1YJOF2vfteLFA== + +"@google-cloud/promisify@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@google-cloud/promisify/-/promisify-4.0.0.tgz#a906e533ebdd0f754dca2509933334ce58b8c8b1" + integrity sha512-Orxzlfb9c67A15cq2JQEyVc7wEsmFBmHjZWZYQMUyJ1qivXyMwdyNOs9odi79hze+2zqdTtu1E19IM/FtqZ10g== + +"@google-cloud/storage@7.14.0": + version "7.14.0" + resolved "https://registry.yarnpkg.com/@google-cloud/storage/-/storage-7.14.0.tgz#eda9715f68507949214af804c906eba6d168a214" + integrity sha512-H41bPL2cMfSi4EEnFzKvg7XSb7T67ocSXrmF7MPjfgFB0L6CKGzfIYJheAZi1iqXjz6XaCT1OBf6HCG5vDBTOQ== + dependencies: + "@google-cloud/paginator" "^5.0.0" + "@google-cloud/projectify" "^4.0.0" + "@google-cloud/promisify" "^4.0.0" + abort-controller "^3.0.0" + async-retry "^1.3.3" + duplexify "^4.1.3" + fast-xml-parser "^4.4.1" + gaxios "^6.0.2" + google-auth-library "^9.6.3" + html-entities "^2.5.2" + mime "^3.0.0" + p-limit "^3.0.1" + retry-request "^7.0.0" + teeny-request "^9.0.0" + uuid "^8.0.0" + +"@google-cloud/vertexai@1.9.0": + version "1.9.0" + resolved "https://registry.yarnpkg.com/@google-cloud/vertexai/-/vertexai-1.9.0.tgz#30941faa920e1218231604285c56aa4ae172b358" + integrity sha512-8brlcJwFXI4fPuBtsDNQqCdWZmz8gV9jeEKOU0vc5H2SjehCQpXK/NwuSEr916zbhlBHtg/sU37qQQdgvh5BRA== + dependencies: + google-auth-library "^9.1.0" + "@graphql-codegen/add@^5.0.3": version "5.0.3" resolved "https://registry.yarnpkg.com/@graphql-codegen/add/-/add-5.0.3.tgz#1ede6bac9a93661ed7fa5808b203d079e1b1d215" @@ -10428,6 +10474,11 @@ resolved "https://registry.yarnpkg.com/@tokenizer/token/-/token-0.3.0.tgz#fe98a93fe789247e998c75e74e9c7c63217aa276" integrity 
sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A== +"@tootallnate/once@2": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-2.0.0.tgz#f544a148d3ab35801c1f633a7441fd87c2e484bf" + integrity sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A== + "@tootallnate/quickjs-emscripten@^0.23.0": version "0.23.0" resolved "https://registry.yarnpkg.com/@tootallnate/quickjs-emscripten/-/quickjs-emscripten-0.23.0.tgz#db4ecfd499a9765ab24002c3b696d02e6d32a12c" @@ -10563,6 +10614,11 @@ "@types/node" "*" "@types/responselike" "*" +"@types/caseless@*": + version "0.12.5" + resolved "https://registry.yarnpkg.com/@types/caseless/-/caseless-0.12.5.tgz#db9468cb1b1b5a925b8f34822f1669df0c5472f5" + integrity sha512-hWtVTC2q7hc7xZ/RLbxapMvDMgUnDvKvMOpKal4DrMyfGBUfB1oKaZlIRr6mJL+If3bAP6sV/QneGzF6tJjZDg== + "@types/cli-progress@^3.11.0": version "3.11.6" resolved "https://registry.yarnpkg.com/@types/cli-progress/-/cli-progress-3.11.6.tgz#94b334ebe4190f710e51c1bf9b4fedb681fa9e45" @@ -11469,6 +11525,16 @@ "@types/node" "*" safe-buffer "~5.1.1" +"@types/request@^2.48.8": + version "2.48.12" + resolved "https://registry.yarnpkg.com/@types/request/-/request-2.48.12.tgz#0f590f615a10f87da18e9790ac94c29ec4c5ef30" + integrity sha512-G3sY+NpsA9jnwm0ixhAFQSJ3Q9JkpLZpJbI3GMv0mIAT0y3mRabYeINzal5WOChIiaTEGQYlHOKgkaM9EisWHw== + dependencies: + "@types/caseless" "*" + "@types/node" "*" + "@types/tough-cookie" "*" + form-data "^2.5.0" + "@types/resolve@1.20.2", "@types/resolve@^1.20.2": version "1.20.2" resolved "https://registry.yarnpkg.com/@types/resolve/-/resolve-1.20.2.tgz#97d26e00cd4a0423b4af620abecf3e6f442b7975" @@ -12262,7 +12328,7 @@ optionalDependencies: onnxruntime-node "1.14.0" -"@xmldom/xmldom@^0.8.10", "@xmldom/xmldom@^0.8.6", "@xmldom/xmldom@^0.8.8": +"@xmldom/xmldom@^0.8.10", "@xmldom/xmldom@^0.8.6": version "0.8.10" resolved "https://registry.yarnpkg.com/@xmldom/xmldom/-/xmldom-0.8.10.tgz#a1337ca426aa61cef9fe15b5b28e340a72f6fa99" integrity sha512-2WALfTl4xo2SkGCYRt6rDTFfk9R1czmBvUQy12gK2KuRKIpWEhcbbzy8EZXtz/jkRqHX8bFEc6FC1HjX4TUWYw== @@ -12951,6 +13017,11 @@ arrify@^1.0.1: resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d" integrity "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0= sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==" +arrify@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/arrify/-/arrify-2.0.1.tgz#c9655e9331e0abcd588d2a7cad7e9956f66701fa" + integrity sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug== + arrivals@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/arrivals/-/arrivals-2.1.2.tgz#bbf8676908f85a133df4c5c1aff8f4cd99e92785" @@ -13189,7 +13260,7 @@ async-limiter@~1.0.0: resolved "https://registry.yarnpkg.com/async-limiter/-/async-limiter-1.0.1.tgz#dd379e94f0db8310b08291f9d64c3209766617fd" integrity "sha1-3TeelPDbgxCwgpH51kwyCXZmF/0= sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ==" -async-retry@^1.2.1: +async-retry@^1.2.1, async-retry@^1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/async-retry/-/async-retry-1.3.3.tgz#0e7f36c04d8478e7a58bdbed80cedf977785f280" integrity "sha1-Dn82wE2EeOeli9vtgM7fl3eF8oA= sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==" @@ -16501,6 +16572,16 @@ duplexify@^3.5.0, duplexify@^3.6.0: 
readable-stream "^2.0.0" stream-shift "^1.0.0" +duplexify@^4.1.3: + version "4.1.3" + resolved "https://registry.yarnpkg.com/duplexify/-/duplexify-4.1.3.tgz#a07e1c0d0a2c001158563d32592ba58bddb0236f" + integrity sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA== + dependencies: + end-of-stream "^1.4.1" + inherits "^2.0.3" + readable-stream "^3.1.1" + stream-shift "^1.0.2" + duration-fns@3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/duration-fns/-/duration-fns-3.0.2.tgz#842223e05750b2aa66a28a24f58b611b71a44d96" @@ -17857,7 +17938,7 @@ fast-xml-parser@4.2.5: dependencies: strnum "^1.0.5" -fast-xml-parser@4.4.1, fast-xml-parser@^4.3.2: +fast-xml-parser@4.4.1, fast-xml-parser@^4.3.2, fast-xml-parser@^4.4.1: version "4.4.1" resolved "https://registry.yarnpkg.com/fast-xml-parser/-/fast-xml-parser-4.4.1.tgz#86dbf3f18edf8739326447bcaac31b4ae7f6514f" integrity sha512-xkjOecfnKGkSsOwtZ5Pz7Us/T6mrbPQrq0nh+aCO5V9nk5NLWmasAHumTKjiPJPWANe+kAZ84Jc8ooJkzZ88Sw== @@ -18332,14 +18413,15 @@ form-data@4.0.0, form-data@^4.0.0: combined-stream "^1.0.8" mime-types "^2.1.12" -form-data@^2.3.1, form-data@~2.3.2: - version "2.3.3" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" - integrity "sha1-3M5SwF9kTymManq5Nr1yTO/786Y= sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==" +form-data@^2.3.1, form-data@^2.5.0: + version "2.5.2" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.5.2.tgz#dc653743d1de2fcc340ceea38079daf6e9069fd2" + integrity sha512-GgwY0PS7DbXqajuGf4OYlsrIu3zgxD6Vvql43IBhm6MahqA5SK/7mwhtNj2AdH2z35YR34ujJ7BN+3fFC3jP5Q== dependencies: asynckit "^0.4.0" combined-stream "^1.0.6" mime-types "^2.1.12" + safe-buffer "^5.2.1" form-data@^3.0.0: version "3.0.1" @@ -18350,6 +18432,15 @@ form-data@^3.0.0: combined-stream "^1.0.8" mime-types "^2.1.12" +form-data@~2.3.2: + version "2.3.3" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" + integrity "sha1-3M5SwF9kTymManq5Nr1yTO/786Y= sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==" + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.6" + mime-types "^2.1.12" + format@^0.2.0: version "0.2.2" resolved "https://registry.yarnpkg.com/format/-/format-0.2.2.tgz#d6170107e9efdc4ed30c9dc39016df942b5cb58b" @@ -18592,15 +18683,16 @@ gauge@^3.0.0: strip-ansi "^6.0.1" wide-align "^1.1.2" -gaxios@^6.0.0, gaxios@^6.0.3, gaxios@^6.1.1: - version "6.3.0" - resolved "https://registry.yarnpkg.com/gaxios/-/gaxios-6.3.0.tgz#5cd858de47c6560caaf0f99bb5d89c5bdfbe9034" - integrity sha512-p+ggrQw3fBwH2F5N/PAI4k/G/y1art5OxKpb2J2chwNNHM4hHuAOtivjPuirMF4KNKwTTUal/lPfL2+7h2mEcg== +gaxios@^6.0.0, gaxios@^6.0.2, gaxios@^6.0.3, gaxios@^6.1.1: + version "6.7.1" + resolved "https://registry.yarnpkg.com/gaxios/-/gaxios-6.7.1.tgz#ebd9f7093ede3ba502685e73390248bb5b7f71fb" + integrity sha512-LDODD4TMYx7XXdpwxAVRAIAuB0bzv0s+ywFonY46k126qzQHT9ygyoa9tncmOiQmmDrik65UYsEkv3lbfqQ3yQ== dependencies: extend "^3.0.2" https-proxy-agent "^7.0.1" is-stream "^2.0.0" node-fetch "^2.6.9" + uuid "^9.0.1" gcp-metadata@^6.1.0: version "6.1.0" @@ -18989,10 +19081,10 @@ gonzales-pe@^4.3.0: dependencies: minimist "^1.2.5" -google-auth-library@^9.0.0: - version "9.6.3" - resolved "https://registry.yarnpkg.com/google-auth-library/-/google-auth-library-9.6.3.tgz#add8935bc5b842a8e80f84fef2b5ed9febb41d48" - integrity 
sha512-4CacM29MLC2eT9Cey5GDVK4Q8t+MMp8+OEdOaqD9MG6b0dOyLORaaeJMPQ7EESVgm/+z5EKYyFLxgzBJlJgyHQ== +google-auth-library@^9.0.0, google-auth-library@^9.1.0, google-auth-library@^9.6.3: + version "9.15.0" + resolved "https://registry.yarnpkg.com/google-auth-library/-/google-auth-library-9.15.0.tgz#1b009c08557929c881d72f953f17e839e91b009b" + integrity sha512-7ccSEJFDFO7exFbO6NRyC+xH8/mZ1GZGG2xxx9iHxZWcjUjJpjWxIMw3cofAKcueZ6DATiukmmprD7yavQHOyQ== dependencies: base64-js "^1.3.0" ecdsa-sig-formatter "^1.0.11" @@ -19726,6 +19818,15 @@ http-parser-js@>=0.5.1: resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.5.3.tgz#01d2709c79d41698bb01d4decc5e9da4e4a033d9" integrity "sha1-AdJwnHnUFpi7AdTezF6dpOSgM9k= sha512-t7hjvef/5HEK7RWTdUzVUhl8zkEu+LlaE0IYzdMuvbSDipxBRpOn4Uhw8ZyECEa808iVT8XCjzo6xmYt4CiLZg==" +http-proxy-agent@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz#5129800203520d434f142bc78ff3c170800f2b43" + integrity sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w== + dependencies: + "@tootallnate/once" "2" + agent-base "6" + debug "4" + http-proxy-agent@^7.0.0, http-proxy-agent@^7.0.1, http-proxy-agent@^7.0.2: version "7.0.2" resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz#9a8b1f246866c028509486585f62b8f2c18c270e" @@ -25177,7 +25278,7 @@ p-is-promise@^2.0.0: resolved "https://registry.yarnpkg.com/p-is-promise/-/p-is-promise-2.1.0.tgz#918cebaea248a62cf7ffab8e3bca8c5f882fc42e" integrity sha512-Y3W0wlRPK8ZMRbNq97l4M5otioeA5lm1z7bkNkxCka8HSPjR0xRWmpCmc9utiaLP9Jb1eD8BgeIxTW4AIF45Pg== -p-limit@3.1.0, p-limit@^3.0.2: +p-limit@3.1.0, p-limit@^3.0.1, p-limit@^3.0.2: version "3.1.0" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" integrity "sha1-4drMvnjQ0TiMoYxk/qOOPlfjcGs= sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==" @@ -25611,12 +25712,12 @@ pathval@^2.0.0: resolved "https://registry.yarnpkg.com/pathval/-/pathval-2.0.0.tgz#7e2550b422601d4f6b8e26f1301bc8f15a741a25" integrity sha512-vE7JKRyES09KiunauX7nd2Q9/L7lhok4smP9RZTDeD4MVs72Dp2qNFVz39Nz5a0FVEW0BJR6C0DYrq6unoziZA== -pdf2json@^3.0.5: - version "3.0.5" - resolved "https://registry.yarnpkg.com/pdf2json/-/pdf2json-3.0.5.tgz#33fa61d58f8a655a14118311e0366a3f5530cb33" - integrity sha512-Un1yLbSlk/zfwrltgguskExIioXZlFSFwsyXU0cnBorLywbTbcdzmJJEebh+U2cFCtR7y8nDs5lPHAe7ldxjZg== +pdf2json@3.1.4, pdf2json@^3.0.5: + version "3.1.4" + resolved "https://registry.yarnpkg.com/pdf2json/-/pdf2json-3.1.4.tgz#d1bf8cb663bef9568bba3ad6d58b4e2b662c93b8" + integrity sha512-rS+VapXpXZr+5lUpHmRh3ugXdFXp24p1RyG24yP1DMpqP4t0mrYNGpLtpSbWD42PnQ59GIXofxF+yWb7M+3THg== dependencies: - "@xmldom/xmldom" "^0.8.8" + "@xmldom/xmldom" "^0.8.10" pdfjs-dist@4.4.168: version "4.4.168" @@ -27959,6 +28060,15 @@ ret@~0.1.10: resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" integrity "sha1-uKSCXVvbH8P29Twrwz+BOIaBx7w= sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==" +retry-request@^7.0.0: + version "7.0.2" + resolved "https://registry.yarnpkg.com/retry-request/-/retry-request-7.0.2.tgz#60bf48cfb424ec01b03fca6665dee91d06dd95f3" + integrity sha512-dUOvLMJ0/JJYEn8NrpOaGNE7X3vpI5XlZS/u0ANjqtcZVKnIxP7IgCFwrKTxENw29emmwug53awKtaMm4i9g5w== + dependencies: + "@types/request" "^2.48.8" + extend "^3.0.2" + teeny-request "^9.0.0" + 
retry@0.13.1, retry@^0.13.1: version "0.13.1" resolved "https://registry.yarnpkg.com/retry/-/retry-0.13.1.tgz#185b1587acf67919d63b357349e03537b2484658" @@ -28139,12 +28249,12 @@ safe-array-concat@^1.1.0: has-symbols "^1.0.3" isarray "^2.0.5" -safe-buffer@5.1.2, safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: +safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity "sha1-mR7GnSluAxN0fVm9/St0XDX4go0= sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" -safe-buffer@5.2.1, safe-buffer@~5.2.0: +safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@^5.2.1, safe-buffer@~5.2.0: version "5.2.1" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" integrity "sha1-Hq+fqb2x/dTsdfWPnNtOa3gn7sY= sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" @@ -29298,6 +29408,13 @@ stream-chain@^2.0.3: resolved "https://registry.yarnpkg.com/stream-chain/-/stream-chain-2.2.5.tgz#b30967e8f14ee033c5b9a19bbe8a2cba90ba0d09" integrity sha512-1TJmBx6aSWqZ4tx7aTpBDXK0/e2hhcNSTV8+CbFJtDjbb+I1mZ8lHit0Grw9GRT+6JbIrrDd8esncgBi8aBXGA== +stream-events@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/stream-events/-/stream-events-1.0.5.tgz#bbc898ec4df33a4902d892333d47da9bf1c406d5" + integrity sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg== + dependencies: + stubs "^3.0.0" + stream-json@1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/stream-json/-/stream-json-1.1.3.tgz#4ebef75590f3af2a7291726a8e2bb2ce06d2c166" @@ -29305,10 +29422,10 @@ stream-json@1.1.3: dependencies: stream-chain "^2.0.3" -stream-shift@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.1.tgz#d7088281559ab2778424279b0877da3c392d5a3d" - integrity "sha1-1wiCgVWasneEJCebCHfaPDktWj0= sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==" +stream-shift@^1.0.0, stream-shift@^1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.3.tgz#85b8fab4d71010fc3ba8772e8046cc49b8a3864b" + integrity sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ== stream-transform@^2.1.3: version "2.1.3" @@ -29388,7 +29505,7 @@ string-trim-spaces-only@^5.0.10: resolved "https://registry.yarnpkg.com/string-trim-spaces-only/-/string-trim-spaces-only-5.0.10.tgz#62f1d32ff8191cbfccb43c08e645c38444d61b77" integrity sha512-MhmjE5jNqb1Ylo+BARPRlsdChGLrnPpAUWrT1VOxo9WhWwKVUU6CbZTfjwKaQPYTGS/wsX/4Zek88FM2rEb5iA== -"string-width-cjs@npm:string-width@^4.2.0": +"string-width-cjs@npm:string-width@^4.2.0", "string-width@^1.0.2 || 2 || 3 || 4", string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: version "4.2.3" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== @@ -29406,15 +29523,6 @@ string-width@^1.0.1: is-fullwidth-code-point "^1.0.0" strip-ansi "^3.0.0" -"string-width@^1.0.2 || 2 || 3 || 
4", string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: - version "4.2.3" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" - integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== - dependencies: - emoji-regex "^8.0.0" - is-fullwidth-code-point "^3.0.0" - strip-ansi "^6.0.1" - string-width@^2.0.0, string-width@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" @@ -29535,7 +29643,7 @@ stringify-object@^5.0.0: is-obj "^3.0.0" is-regexp "^3.1.0" -"strip-ansi-cjs@npm:strip-ansi@^6.0.1": +"strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@^6.0.0, strip-ansi@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== @@ -29556,13 +29664,6 @@ strip-ansi@^4.0.0: dependencies: ansi-regex "^3.0.0" -strip-ansi@^6.0.0, strip-ansi@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" - integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== - dependencies: - ansi-regex "^5.0.1" - strip-ansi@^7.0.1, strip-ansi@^7.1.0: version "7.1.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" @@ -29639,6 +29740,11 @@ strtok3@^6.2.4: "@tokenizer/token" "^0.3.0" peek-readable "^4.1.0" +stubs@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/stubs/-/stubs-3.0.0.tgz#e8d2ba1fa9c90570303c030b6900f7d5f89abe5b" + integrity sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw== + style-loader@3.3.4, style-loader@^3.3.1: version "3.3.4" resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-3.3.4.tgz#f30f786c36db03a45cbd55b6a70d930c479090e7" @@ -30017,6 +30123,17 @@ tdigest@^0.1.1: dependencies: bintrees "1.0.2" +teeny-request@^9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/teeny-request/-/teeny-request-9.0.0.tgz#18140de2eb6595771b1b02203312dfad79a4716d" + integrity sha512-resvxdc6Mgb7YEThw6G6bExlXKkv6+YbuzGg9xuXxSgxJF7Ozs+o8Y9+2R3sArdWdW8nOokoQb1yrpFB0pQK2g== + dependencies: + http-proxy-agent "^5.0.0" + https-proxy-agent "^5.0.0" + node-fetch "^2.6.9" + stream-events "^1.0.5" + uuid "^9.0.0" + telejson@^7.2.0: version "7.2.0" resolved "https://registry.yarnpkg.com/telejson/-/telejson-7.2.0.tgz#3994f6c9a8f8d7f2dba9be2c7c5bbb447e876f32" @@ -32291,7 +32408,7 @@ worker-timers@^7.1.4: worker-timers-broker "^6.1.8" worker-timers-worker "^7.0.71" -"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", wrap-ansi@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== @@ -32317,15 +32434,6 @@ wrap-ansi@^6.0.1, wrap-ansi@^6.2.0: string-width "^4.1.0" strip-ansi "^6.0.0" -wrap-ansi@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" - integrity 
sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== - dependencies: - ansi-styles "^4.0.0" - string-width "^4.1.0" - strip-ansi "^6.0.0" - wrap-ansi@^8.1.0: version "8.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214"