diff --git a/.gitignore b/.gitignore
index d6b07ebd..2ee153da 100644
--- a/.gitignore
+++ b/.gitignore
@@ -53,3 +53,5 @@
 lib
 .xmark.*
 .next
+
+__snapshots__
\ No newline at end of file
diff --git a/__tests__/clonetest.js b/__tests__/clonetest.js
new file mode 100644
index 00000000..e4a59144
--- /dev/null
+++ b/__tests__/clonetest.js
@@ -0,0 +1,27 @@
+// clonetest.js
+import React from 'react';
+import {shallow} from 'enzyme';
+import Clone from '../components/headers/clone.js';
+import renderer from 'react-test-renderer';
+
+test('render component', () => {
+  const component = renderer.create(
+    <Clone />,
+  );
+  let tree = component.toJSON();
+  expect(tree).toMatchSnapshot();
+});
+
+test('debounce onclick', () => {
+  let wrapper = shallow(<Clone />);
+  const cloneMock = jest.fn();
+  wrapper.instance().clone = cloneMock;
+  expect(wrapper.find('span').hasClass('disabled')).toEqual(false);
+  wrapper.instance().onClick();
+  wrapper.instance().onClick();
+  wrapper.instance().onClick();
+  wrapper.instance().onClick();
+  wrapper.instance().onClick();
+  expect(wrapper.find('span').hasClass('disabled')).toEqual(true);
+  expect(cloneMock).toHaveBeenCalledTimes(1);
+});
diff --git a/__tests__/s3test.js b/__tests__/s3test.js
index 44f515f9..4b2071e5 100644
--- a/__tests__/s3test.js
+++ b/__tests__/s3test.js
@@ -1,9 +1,8 @@
 // @flow
+const s3Service = require("./../backend/content-providers/s3/s3");
 jest.mock("aws-sdk/clients/s3");
 const awsMock = require("aws-sdk/clients/s3");
 
-const s3Service = require("./../backend/content-providers/s3/s3");
-
 describe("Test S3 service", () => {
   test("getObject returns notebook content", done => {
     awsMock.prototype.getObject = function(params, cb) {
diff --git a/babel.config.js b/babel.config.js
deleted file mode 100644
index b9de8877..00000000
--- a/babel.config.js
+++ /dev/null
@@ -1,35 +0,0 @@
-module.exports = function(api) {
-  if (api) {
-    const env = api.env(); // eslint-disable-line no-unused-vars
-  }
-
-  const config = {
-    presets: ["@babel/preset-env", "@babel/preset-react"],
-    plugins: [
-      "styled-jsx/babel",
-      "@babel/plugin-syntax-dynamic-import",
-      "@babel/plugin-proposal-object-rest-spread",
-      "@babel/plugin-proposal-class-properties",
-      "@babel/plugin-proposal-export-default-from",
-      [
-        "@babel/plugin-transform-runtime",
-        {
-          corejs: 2
-        }
-      ]
-    ],
-    overrides: [
-      {
-        test: ["**/*.js", "**/*.jsx"],
-        presets: ["@babel/preset-flow"],
-        plugins: ["@babel/plugin-transform-flow-strip-types"]
-      },
-      {
-        test: ["**/*.ts", "**/*.tsx"],
-        presets: ["@babel/preset-typescript"]
-      }
-    ]
-  };
-
-  return config;
-};
diff --git a/backend/config.js b/backend/config.js
deleted file mode 100644
index 7c9a1351..00000000
--- a/backend/config.js
+++ /dev/null
@@ -1,144 +0,0 @@
-// @flow
-
-function deprecate(env: Object, oldVar: string, newVar: string) {
-  if (env[oldVar]) {
-    console.warn(`${oldVar} is deprecated, please use ${newVar}`);
-  }
-}
-
-// eslint-disable-next-line no-unused-vars
-function populateLocalStorageOptions(env): Object {
-  let baseDirectory = process.env.COMMUTER_LOCAL_STORAGE_BASEDIRECTORY;
-
-  if (!baseDirectory) {
-    baseDirectory = process.cwd();
-    console.warn("Running in the current working directory, ", baseDirectory);
-  }
-
-  return {
-    local: {
-      baseDirectory
-    }
-  };
-}
-
-function populateS3Options(env): Object {
-  deprecate(env, "COMMUTER_BASEPATH", "COMMUTER_S3_BASE_PREFIX");
-  deprecate(env, "COMMUTER_PATH_DELIMITER", "COMMUTER_S3_PATH_DELIMITER");
-
-  if (!env.COMMUTER_BUCKET) {
-    throw "S3 Bucket Name Missing";
-  }
-
-  const s3PathDelimiter =
-    env.COMMUTER_S3_PATH_DELIMITER || env.COMMUTER_PATH_DELIMITER || "/";
-
-  const s3BasePrefix = (
-    env.COMMUTER_S3_BASE_PREFIX ||
-    env.COMMUTER_BASEPATH || // deprecated
-    ""
-  )
-    // trim off trailing slashes
-    .replace(/\/+$/, "");
-
-  const s3Endpoint = env.COMMUTER_S3_ENDPOINT || "";
-
-  // only interpret "true" as true otherwise false
-  const s3ForcePathStyle = /^true$/i.test(env.COMMUTER_S3_FORCE_PATH_STYLE || "");
-
-  const config = {
-    s3: {
-      params: {
-        // required s3 bucket name
-        Bucket: env.COMMUTER_BUCKET
-      },
-      // required key
-      accessKeyId: env.COMMUTER_S3_KEY,
-      // required secret
-      secretAccessKey: env.COMMUTER_S3_SECRET,
-      endpoint: s3Endpoint,
-      s3ForcePathStyle: s3ForcePathStyle
-    },
-    s3PathDelimiter,
-    s3BasePrefix
-  };
-
-  return config;
-}
-
-function populateGoogleStorageOptions(env): Object {
-  if (!env.COMMUTER_BUCKET) {
-    throw "Bucket Name Missing";
-  }
-  const bucket = env.COMMUTER_BUCKET;
-  const pathDelimiter = env.COMMUTER_GCS_PATH_DELIMITER || "/";
-  const basePrefix = (env.COMMUTER_GCS_BASE_PREFIX || "")
-    // trim off trailing slashes
-    .replace(/\/+$/, "");
-  return {
-    bucket,
-    pathDelimiter,
-    basePrefix,
-  };
-}
-
-function instantiate() {
-  const storageBackend = (
-    process.env.COMMUTER_STORAGE_BACKEND || "local"
-  ).toLowerCase();
-
-  if (storageBackend !== "local" && storageBackend !== "s3" && storageBackend !== "gcs") {
-    throw new Error(`Unknown storageBackend ${storageBackend}`);
-  }
-
-  let discoveryBackend = process.env.COMMUTER_DISCOVERY_BACKEND || "none";
-  // NOTE: The automatic assumption of using elasticsearch could be deprecated
-  // in favor of selecting it here. Not sure which way to go.
-  // Deferring that decision to later!
-  if (discoveryBackend === "none" && process.env.COMMUTER_ES_HOST) {
-    discoveryBackend = "elasticsearch";
-  }
-
-  console.log("CONFIGURED DISCOVERY", discoveryBackend);
-
-  const config = {};
-
-  switch (storageBackend) {
-    case "s3":
-      config.storage = populateS3Options(process.env);
-      break;
-    case "gcs":
-      config.storage = populateGoogleStorageOptions(process.env);
-      break;
-    case "local":
-    default:
-      config.storage = populateLocalStorageOptions(process.env);
-  }
-
-  config.storageBackend = storageBackend;
-
-  switch (discoveryBackend) {
-    case "elasticsearch":
-      config.discovery = {
-        elasticsearch: {
-          host: process.env.COMMUTER_ES_HOST || "",
-          log: "debug"
-        }
-      };
-      config.discoveryBackend = "elasticsearch";
-      break;
-    default:
-      config.discoveryBackend = "none";
-  }
-
-  config.nodeEnv = process.env.NODE_ENV || "test";
-  config.port = process.env.PORT || process.env.COMMUTER_PORT || 4000;
-
-  return config;
-}
-
-module.exports = instantiate();
diff --git a/backend/content-providers/gcs/contents.js b/backend/content-providers/gcs/contents.js
deleted file mode 100644
index 69d84b40..00000000
--- a/backend/content-providers/gcs/contents.js
+++ /dev/null
@@ -1,56 +0,0 @@
-// @flow
-
-import type { $Request, $Response } from "express";
-
-import type { GoogleStorageProviderOptions } from "./gcs";
-
-const express = require("express");
-
-const { createGcsService } = require("./gcs");
-
-// TODO: typing here reflects what was put in place before, this could be
-// more strict while letting flow do the work vs. the testing of common functions
-const isDir = (path?: string | null) => !path || (path && path.endsWith("/"));
-
-const errObject = (err, path) => ({
-  message: `${err.message}: ${path}`,
-  reason: err.code
-});
-
-function createRouter(config: GoogleStorageProviderOptions): express.Router {
-  const router = express.Router();
-  const gcsService = createGcsService(config);
-
-  router.get("/*", (req: $Request, res: $Response) => {
-    const path = req.params["0"];
-    const cb = (err, data) => {
-      if (err) res.status(500).json(errObject(err, path));
-      else res.json(data);
-    };
-    if (isDir(path)) {
-      gcsService.listObjects(path, cb);
-    } else {
-      gcsService.getObject(path, (err, data) => {
-        if (err && err.code === 404) {
-          gcsService.listObjects(path.replace(/\/?$/, "/"), cb);
-          return;
-        }
-        if (err) res.status(500).json(errObject(err, path));
-        else res.json(data);
-      });
-    }
-  });
-
-  router.post("/*", (req: $Request, res: $Response) => {
-    const path = req.params["0"];
-    const cb = err => {
-      if (err) res.status(500).json(errObject(err, path));
-      else res.status(201).send();
-    };
-    gcsService.uploadObject(path, req.body, cb);
-  });
-
-  return router;
-}
-
-export { createRouter, isDir };
diff --git a/backend/content-providers/gcs/files.js b/backend/content-providers/gcs/files.js
deleted file mode 100644
index 45bbbd6d..00000000
--- a/backend/content-providers/gcs/files.js
+++ /dev/null
@@ -1,36 +0,0 @@
-// @flow
-
-const { Storage } = require("@google-cloud/storage");
-import type { GoogleStorageProviderOptions } from "./gcs";
-
-const express = require("express");
-
-export function createRouter(config: GoogleStorageProviderOptions): express.Router {
-  const router = express.Router();
-  const gcs = new Storage();
-  router.get("/*", (req: $Request, res: $Response, next: Function) => {
-    let key = decodeURIComponent(req.originalUrl.substr(req.baseUrl.length + 1));
-    const queryIndex = key.indexOf('?');
-    if (queryIndex !== -1) {
-      key = key.substr(0, queryIndex);
-    }
-    if (!key) {
-      return res.status(404).send('GCS key is missing.');
-    }
-    let readStream = gcs.bucket(config.bucket).file(key).createReadStream()
-      .on('error', function (err) {
-        // If the code is PreconditionFailed and we passed an IfNoneMatch param
-        // the object has not changed, so just return a 304 Not Modified response.
-        if (err.code === 'NotModified' ||
-            (err.code === 'PreconditionFailed' && s3Params.IfNoneMatch)) {
-          return res.status(304).end();
-        }
-        if (err.code === 404) {
-          return res.status(404).send(`Not found: ${key}`);
-        }
-        return next(err);
-      });
-    readStream.pipe(res);
-  });
-  return router;
-}
diff --git a/backend/content-providers/gcs/gcs.js b/backend/content-providers/gcs/gcs.js
deleted file mode 100644
index 0ed682d5..00000000
--- a/backend/content-providers/gcs/gcs.js
+++ /dev/null
@@ -1,119 +0,0 @@
-// @flow
-const { Storage } = require("@google-cloud/storage"),
-  { chain } = require("lodash");
-
-export type GoogleStorageProviderOptions = {
-  bucket: string,
-  pathDelimiter: string,
-  basePrefix?: string,
-};
-
-function createGcsService(config: GoogleStorageProviderOptions) {
-  const gcs = new Storage();
-
-  const fileName = (path: string): string =>
-    chain(path)
-      .trimEnd("/")
-      .split(config.pathDelimiter)
-      .last()
-      .value();
-  const filePath = (path: string) =>
-    path.replace(`${config.basePrefix}`, "").replace(/^\//, "");
-  const gcsPrefix = (path: string) =>
-    config.basePrefix ? `${config.basePrefix}/${path}` : path;
-  const dirObject = prefix => ({
-    name: fileName(prefix),
-    path: filePath(prefix),
-    type: "directory",
-    writable: true,
-    created: null,
-    last_modified: null,
-    mimetype: null,
-    content: null,
-    format: null
-  });
-  const isNotebook = gcsData => gcsData.name.endsWith("ipynb");
-  const fileObject = data => ({
-    name: fileName(data.name),
-    path: filePath(data.name),
-    type: isNotebook(data) ? "notebook" : "file",
-    writable: true,
-    created: null,
-    last_modified: data.LastModified,
-    mimetype: null,
-    content: null,
-    format: null
-  });
-  const listObjects = (path: string, callback: Function) => {
-    const query = {
-      prefix: gcsPrefix(path),
-      delimiter: config.pathDelimiter,
-      autoPaginate: false,
-    };
-    const dirs = [],
-      files = [],
-      bucket = gcs.bucket(config.bucket);
-    const cb = (err, data, next, res) => {
-      if (res.prefixes) dirs.push(...res.prefixes.map(dirObject));
-      if (data) files.push(...data.map(fileObject));
-      if (next) {
-        bucket.getFiles(next, cb);
-      } else {
-        callback(null, {
-          name: fileName(path),
-          path: path,
-          type: "directory",
-          writable: true,
-          created: null,
-          last_modified: null,
-          mimetype: null,
-          content: [...files, ...dirs],
-          format: "json"
-        });
-      }
-    };
-    bucket.getFiles(query, cb);
-  };
-  const getObject = (path: string, callback: Function) => {
-    gcs.bucket(config.bucket).file(gcsPrefix(path)).get({}, (err, file) => {
-      if (err) {
-        callback(err);
-        return;
-      }
-      file.download({}, (err, body) => {
-        if (err) {
-          callback(err);
-          return;
-        }
-        let content = body.toString();
-        if (isNotebook(file)) {
-          try {
-            content = JSON.parse(content);
-          } catch (err) {
-            callback(err);
-            return;
-          }
-        }
-        const ret = Object.assign({}, fileObject(file), {
-          content
-        });
-        callback(null, ret);
-      });
-    });
-  };
-  const uploadObject = (path: string, body: mixed, callback: Function) => {
-    gcs.bucket(config.bucket).file(path).save(JSON.stringify(body), null, (err, data) => {
-      if (err) callback(err);
-      else callback(null, data);
-    });
-  };
-  return {
-    listObjects,
-    getObject,
-    uploadObject
-  };
-}
-
-export { createGcsService };
diff --git a/backend/content-providers/s3/contents.js b/backend/content-providers/s3/contents.js
deleted file mode 100644
index b230215d..00000000
--- a/backend/content-providers/s3/contents.js
+++ /dev/null
@@ -1,65 +0,0 @@
-// @flow
-
-import type { $Request, $Response } from "express";
-
-const express = require("express");
-
-const { createS3Service } = require("./s3");
-
-// TODO: typing here reflects what was put in place before, this could be
-// more strict while letting flow do the work vs. the testing of common functions
-const isDir = (path?: string | null) => !path || (path && path.endsWith("/"));
-
-const errObject = (err, path) => ({
-  message: `${err.message}: ${path}`,
-  reason: err.code
-});
-
-// TODO: Flow type our config
-function createRouter(config: Object): express.Router {
-  const router = express.Router();
-  const s3Service = createS3Service(config);
-
-  router.get("/*", (req: $Request, res: $Response) => {
-    const path = req.params["0"];
-    const cb = (err, data) => {
-      if (err) res.status(500).json(errObject(err, path));
-      else res.json(data);
-    };
-    if (isDir(path)) s3Service.listObjects(path, cb);
-    else {
-      s3Service.getObject(path, (err, data) => {
-        if (err && err.code === "NoSuchKey") {
-          s3Service.listObjects(path.replace(/\/?$/, "/"), cb);
-          return;
-        }
-
-        if (err) res.status(500).json(errObject(err, path));
-        else res.json(data);
-      });
-    }
-  });
-
-  router.delete("/*", (req: $Request, res: $Response) => {
-    const path = req.params["0"];
-    const cb = err => {
-      if (err) res.status(500).json(errObject(err, path));
-      else res.status(204).send(); //as per jupyter contents api
-    };
-    if (isDir(path)) s3Service.deleteObjects(path, cb);
-    else s3Service.deleteObject(path, cb);
-  });
-
-  router.post("/*", (req: $Request, res: $Response) => {
-    const path = req.params["0"];
-    const cb = err => {
-      if (err) res.status(500).json(errObject(err, path));
-      else res.status(201).send();
-    };
-    s3Service.uploadObject(path, req.body, cb);
-  });
-
-  return router;
-}
-
-export { createRouter, isDir };
diff --git a/backend/content-providers/s3/files.js b/backend/content-providers/s3/files.js
deleted file mode 100644
index f73d13d6..00000000
--- a/backend/content-providers/s3/files.js
+++ /dev/null
@@ -1,22 +0,0 @@
-// @flow
-
-const express = require("express"),
-  s3Proxy = require("s3-proxy");
-
-// TODO: Flow type config
-export function createRouter(config: Object): express.Router {
-  const router = express.Router();
-
-  router.get(
-    "/*",
-    s3Proxy({
-      bucket: config.s3.params.Bucket,
-      prefix: config.s3BasePrefix,
-      accessKeyId: config.s3.accessKeyId,
-      secretAccessKey: config.s3.secretAccessKey,
-      endpoint: config.s3.endpoint,
-      overrideCacheControl: "max-age=100000"
-    })
-  );
-  return router;
-}
diff --git a/backend/content-providers/s3/index.js b/backend/content-providers/s3/index.js
deleted file mode 100644
index 3d7cd159..00000000
--- a/backend/content-providers/s3/index.js
+++ /dev/null
@@ -1,6 +0,0 @@
-// @flow
-
-import { createRouter as createContentsRouter } from "./contents";
-import { createRouter as createFilesRouter } from "./files";
-
-export { createContentsRouter, createFilesRouter };
diff --git a/backend/content-providers/s3/s3.js b/backend/content-providers/s3/s3.js
deleted file mode 100644
index 9349981f..00000000
--- a/backend/content-providers/s3/s3.js
+++ /dev/null
@@ -1,176 +0,0 @@
-// @flow
-const S3 = require("aws-sdk/clients/s3"),
-  { chain } = require("lodash");
-
-// TODO: Flowtype config
-function createS3Service(config: Object) {
-  const s3 = new S3(config.s3);
-
-  const fileName = (path: string): string =>
-    chain(path)
-      .trimEnd("/")
-      .split(config.s3PathDelimiter)
-      .last()
-      .value();
-  const filePath = (path: string) =>
-    path.replace(`${config.s3BasePrefix}`, "").replace(/^\//, "");
-  const s3Prefix = (path: string) =>
-    config.s3BasePrefix ? `${config.s3BasePrefix}/${path}` : path;
-  const dirObject = data => ({
-    name: fileName(data.Prefix),
-    path: filePath(data.Prefix),
-    type: "directory",
-    writable: true,
-    created: null,
-    last_modified: null,
-    mimetype: null,
-    content: null,
-    format: null
-  });
-  const isNotebook = s3data => s3data.Key && s3data.Key.endsWith("ipynb");
-  const fileObject = data => ({
-    name: fileName(data.Key),
-    path: filePath(data.Key),
-    type: isNotebook(data) ? "notebook" : "file",
-    writable: true,
-    created: null,
-    last_modified: data.LastModified,
-    mimetype: null,
-    content: null,
-    format: null
-  });
-  const listObjects = (path: string, callback: Function) => {
-    const params = {
-      Prefix: s3Prefix(path),
-      Delimiter: config.s3PathDelimiter, // Maximum allowed by S3 API
-      MaxKeys: 2147483647, //remove the folder name from listing
-      StartAfter: s3Prefix(path)
-    };
-    s3.listObjectsV2(params, (err, data) => {
-      if (err || !data) {
-        callback(err);
-        return;
-      }
-      if (!data.Contents) {
-        callback(new Error("Missing contents from S3 Response"));
-        return;
-      }
-      if (!data.CommonPrefixes) {
-        callback(new Error("Missing CommonPrefixes from S3 Response"));
-        return;
-      }
-      const files = data.Contents.map(fileObject);
-      const dirs = data.CommonPrefixes.map(dirObject);
-      callback(null, {
-        name: fileName(path),
-        path: path,
-        type: "directory",
-        writable: true,
-        created: null,
-        last_modified: null,
-        mimetype: null,
-        content: [...files, ...dirs],
-        format: "json"
-      });
-    });
-  };
-  const getObject = (path: string, callback: Function) => {
-    s3.getObject({ Key: s3Prefix(path) }, (err, data) => {
-      if (err) {
-        callback(err);
-        return;
-      } else {
-        // The Key does not exist on getObject, it's expected to use the path above
-        const s3Response = Object.assign({}, data, {
-          Key: s3Prefix(path)
-        });
-        let content = s3Response.Body.toString();
-        if (isNotebook(s3Response)) {
-          try {
-            content = JSON.parse(content);
-          } catch (err) {
-            callback(err);
-            return;
-          }
-        } // Notebook files end up as pure json // All other files end up as pure strings in the content field
-        const file = Object.assign({}, fileObject(s3Response), {
-          content
-        });
-        callback(null, file);
-      }
-    });
-  };
-  const deleteObject = (path: string, callback: Function) => {
-    s3.deleteObject({ Key: s3Prefix(path) }, (err, data) => {
-      if (err) callback(err);
-      else callback(null, data);
-    });
-  };
-  const deleteObjects = (path: string, callback: Function) => {
-    let objects = [
-      {
-        Key: s3Prefix(path)
-      }
-    ];
-    let callStack = 1;
-    const getObjects = path => {
-      return new Promise((resolve, reject) => {
-        listObjects(path, (err, data) => {
-          if (err) {
-            reject(err);
-          }
-          if (!data.content) {
-            reject(err);
-          }
-          callStack -= 1;
-          data.content.forEach(o => {
-            if (o.type == "directory") {
-              callStack += 1; //recurse
-              getObjects(o.path.substr(1)).then(() => resolve());
-            } else
-              objects.push({
-                Key: s3Prefix(o.path.substr(1))
-              });
-          });
-          if (callStack == 0) resolve(); // notify end
-        });
-      });
-    };
-    const s3Delete = () => {
-      s3.deleteObjects(
-        {
-          Delete: {
-            Objects: objects,
-            Quiet: true
-          }
-        },
-        (err, data) => {
-          if (err) callback(err);
-          else callback(null, data);
-        }
-      );
-    };
-    getObjects(path).then(s3Delete);
-  };
-  const uploadObject = (path: string, body: mixed, callback: Function) => {
-    s3.upload(
-      {
-        Key: s3Prefix(path),
-        Body: JSON.stringify(body)
-      },
-      (err, data) => {
-        if (err) callback(err);
-        else callback(null, data);
-      }
-    );
-  };
-  return {
-    listObjects,
-    getObject,
-    deleteObject,
-    deleteObjects,
-    uploadObject
-  };
-}
-
-export { createS3Service };
diff --git a/backend/discovery-providers/elasticsearch/elasticSearch.js b/backend/discovery-providers/elasticsearch/elasticSearch.js
deleted file mode 100644
index 62ef2669..00000000
--- a/backend/discovery-providers/elasticsearch/elasticSearch.js
+++ /dev/null
@@ -1,37 +0,0 @@
-// @flow
-
-const bodybuilder = require("bodybuilder"),
-  elasticsearch = require("elasticsearch");
-
-export type ESDiscoveryBackendOptions = {
-  host: string,
-  log: string
-};
-
-function createDiscoveryService(options: ESDiscoveryBackendOptions) {
-  const client = new elasticsearch.Client(options);
-
-  const list = (cb: Function, error: Function) =>
-    client
-      .search({
-        index: "commuter",
-        type: "notebooks",
-        body: bodybuilder()
-          .sort("last_modified", "desc")
-          .size(1000)
-          .build()
-      })
-      .then(
-        resp => {
-          cb({ results: resp.hits.hits.map(hit => hit._source) });
-        },
-        err => {
-          error(err.message);
-        }
-      );
-  return {
-    list
-  };
-}
-
-export { createDiscoveryService };
diff --git a/backend/discovery-providers/elasticsearch/index.js b/backend/discovery-providers/elasticsearch/index.js
deleted file mode 100644
index f324ae10..00000000
--- a/backend/discovery-providers/elasticsearch/index.js
+++ /dev/null
@@ -1,26 +0,0 @@
-// @flow
-
-import type { $Request, $Response } from "express";
-
-const express = require("express");
-
-const { createDiscoveryService } = require("./elasticSearch");
-
-function createDiscoveryRouter(discoveryOptions: Object) {
-  const discoveryService = createDiscoveryService(
-    discoveryOptions.elasticsearch
-  );
-
-  const router = express.Router();
-  router.get("/*", (req: $Request, res: $Response) => {
-    const successCb = data => res.json(data);
-
-    const errorCb = err =>
-      res.status(err.statusCode).json({ message: err.message });
-
-    discoveryService.list(successCb, errorCb);
-  });
-  return router;
-}
-
-export { createDiscoveryRouter };
diff --git a/backend/discovery-providers/none/index.js b/backend/discovery-providers/none/index.js
deleted file mode 100644
index 642f11ad..00000000
--- a/backend/discovery-providers/none/index.js
+++ /dev/null
@@ -1,18 +0,0 @@
-// @flow
-
-import type { $Request, $Response } from "express";
-
-const express = require("express");
-
-// eslint-disable-next-line no-unused-vars
-function createDiscoveryRouter(options?: Object): express.Router {
-  const router = express.Router();
-  router.get("/*", (req: $Request, res: $Response) => {
-    res.json({
-      results: []
-    });
-  });
-  return router;
-}
-
-export { createDiscoveryRouter };
diff --git a/backend/index.js b/backend/index.js
deleted file mode 100644
index e23fe060..00000000
--- a/backend/index.js
+++ /dev/null
@@ -1,16 +0,0 @@
-// @flow
-const log = require("log");
-
-const createServer = require("./server");
-
-createServer()
-  .then(server => {
-    const port = server.address().port;
-    console.log(log);
-    log.info("Commuter server listening on port " + port);
-  })
-  .catch((e: Error) => {
-    console.error(e);
-    console.error(e.stack);
-    process.exit(-10);
-  });
diff --git a/backend/resources/commuter.es.mapping.json b/backend/resources/commuter.es.mapping.json
deleted file mode 100644
index 87405c82..00000000
--- a/backend/resources/commuter.es.mapping.json
+++ /dev/null
@@ -1,133 +0,0 @@
-{
-  "mappings": {
-    "notebooks": {
-      "_timestamp": {
-        "enabled": true
-      },
-      "properties": {
-        "name": {
-          "description": "Notebook name",
-          "type": "string"
-        },
-        "path": {
-          "description": "Storage location of the notebook",
"string" - }, - "created": { - "description": "Date created", - "index": "not_analyzed", - "type": "string" - }, - "last_modified": { - "description": "Date modified", - "index": "not_analyzed", - "type": "string" - }, - "mimetype": { - "description": "Content type", - "type": "string" - }, - "format": { - "description": "Format type", - "type": "string" - }, - "type": { - "description": "Notebook type", - "type": "string" - }, - "metadata": { - "properties": { - "kernelspec": { - "description": "Kernel information.", - "type": "object", - "required": ["name", "display_name"], - "properties": { - "name": { - "description": "Name of the kernel specification.", - "type": "string" - }, - "display_name": { - "description": "Name to display in UI.", - "type": "string" - } - } - }, - "language_info": { - "description": "Kernel information.", - "type": "object", - "required": ["name"], - "properties": { - "name": { - "description": "The programming language which this kernel runs.", - "type": "string" - }, - "codemirror_mode": { - "description": "The codemirror mode to use for code in this language.", - "oneOf": [ - { - "type": "string" - }, - { - "type": "object" - } - ], - "type": "object" - }, - "file_extension": { - "description": "The file extension for files in this language.", - "type": "string" - }, - "mimetype": { - "description": "The mimetype corresponding to files in this language.", - "type": "string" - }, - "pygments_lexer": { - "description": "The pygments lexer to use for code in this language.", - "type": "string" - } - } - }, - "orig_nbformat": { - "description": "Original notebook format (major number) before converting the notebook between versions. This should never be written to a file.", - "type": "integer", - "minimum": 1 - }, - "title": { - "description": "The title of the notebook document", - "type": "string" - }, - "authors": { - "description": "The author(s) of the notebook document", - "type": "object", - "item": { - "type": "object", - "properties": { - "name": { - "type": "string" - } - }, - "additionalProperties": true - } - }, - "nteract": { - "properties": { - "tags": { - "type": "string", - "description": "Meta data about the notebook" - }, - "description": { - "description": "Short description about the notebook", - "type": "string" - }, - "image": { - "description": "Display image url", - "type": "string" - } - } - } - } - } - } - } - } -} diff --git a/backend/resources/generateMapping.js b/backend/resources/generateMapping.js deleted file mode 100644 index 9a7762d3..00000000 --- a/backend/resources/generateMapping.js +++ /dev/null @@ -1,89 +0,0 @@ -// @flow -const path = require("path"); - -const jsonfile = require("jsonfile"); -const nbformatv4Schema = require("nbschema").v4; - -const writeFile = schema => - jsonfile.writeFileSync( - path.resolve(__dirname, "commuter.es.mapping.json"), - schema, - { spaces: 2 } - ); - -const add_type_info = nbSchema => { - const { properties } = nbSchema.properties.metadata; - //codemirror_mode key needs a type, and nbformat specifies - // "oneOf": [ - // { - // "type": "string" - // }, - // { - // "type": "object" - // } - // ] - properties.language_info.properties.codemirror_mode.type = "object"; - // ES doesn't support array types. 
-  // ES doesn't support array types. Instead use object type (also handles array of object)
-  //https://www.elastic.co/guide/en/elasticsearch/reference/current/array.html
-  properties.authors.type = "object";
-  return Object.assign({}, properties);
-};
-
-const nteractSchema = () =>
-  jsonfile.readFileSync(
-    path.resolve(__dirname, "nteract.metadata.schema.json")
-  );
-
-const getCommuterSchema = () => ({
-  mappings: {
-    notebooks: {
-      _timestamp: {
-        enabled: true
-      },
-      properties: {
-        name: {
-          description: "Notebook name",
-          type: "string"
-        },
-        path: {
-          description: "Storage location of the notebook",
-          type: "string"
-        },
-        created: {
-          description: "Date created",
-          index: "not_analyzed",
-          type: "string"
-        },
-        last_modified: {
-          description: "Date modified",
-          index: "not_analyzed",
-          type: "string"
-        },
-        mimetype: {
-          description: "Content type",
-          type: "string"
-        },
-        format: {
-          description: "Format type",
-          type: "string"
-        },
-        type: {
-          description: "Notebook type",
-          type: "string"
-        },
-        metadata: {
-          properties: Object.assign(
-            {},
-            add_type_info(nbformatv4Schema),
-            nteractSchema()
-          )
-        }
-      }
-    }
-  }
-});
-
-//IIFE
-{
-  writeFile(getCommuterSchema());
-}
diff --git a/backend/resources/nteract.metadata.schema.json b/backend/resources/nteract.metadata.schema.json
deleted file mode 100644
index 8afeee9c..00000000
--- a/backend/resources/nteract.metadata.schema.json
+++ /dev/null
@@ -1,18 +0,0 @@
-{
-  "nteract": {
-    "properties": {
-      "tags": {
-        "type": "string",
-        "description": "Meta data about the notebook"
-      },
-      "description": {
-        "description": "Short description about the notebook",
-        "type": "string"
-      },
-      "image": {
-        "description": "Display image url",
-        "type": "string"
-      }
-    }
-  }
-}
diff --git a/backend/routes/api/index.js b/backend/routes/api/index.js
deleted file mode 100644
index 1d6240a7..00000000
--- a/backend/routes/api/index.js
+++ /dev/null
@@ -1,37 +0,0 @@
-// @flow
-import type { Middleware, $Request, $Response } from "express";
-
-const express = require("express");
-const bodyParser = require("body-parser");
-
-const defaultContentTypeMiddleware: Middleware = (
-  req: $Request,
-  res: $Response,
-  next
-) => {
-  req.headers["content-type"] =
-    req.headers["content-type"] || "application/json";
-  next();
-};
-
-type APIRouters = {
-  contents: express.Router,
-  discovery: express.Router
-};
-
-function createAPIRouter(api: APIRouters): express.Router {
-  const router = express.Router();
-  router.use(defaultContentTypeMiddleware);
-  router.use(bodyParser.json({ limit: "50mb" })); //50mb is the current threshold
-  router.use(bodyParser.urlencoded({ extended: true }));
-
-  router.use("/ping", (req: $Request, res: $Response) => {
-    res.json({ message: "pong" });
-  });
-
-  router.use("/contents", api.contents);
-  router.use("/v1/discovery", api.discovery);
-  return router;
-}
-
-module.exports = createAPIRouter;
diff --git a/backend/routes/index.js b/backend/routes/index.js
deleted file mode 100644
index b36cd6f0..00000000
--- a/backend/routes/index.js
+++ /dev/null
@@ -1,46 +0,0 @@
-// @flow
-
-const express = require("express");
-
-const createAPIRouter = require("./api");
-
-function createRouter(config): express.Router {
-  let contentsProvider;
-  let discoveryProvider;
-
-  switch (config.storageBackend) {
-    case "s3":
-      contentsProvider = require("../content-providers/s3");
-      break;
-    case "gcs":
-      contentsProvider = require("../content-providers/gcs");
-      break;
-    case "local":
-    default:
-      contentsProvider = require("../content-providers/local");
-  }
-
-  switch (config.discoveryBackend) {
-    case "elasticsearch":
-      // we only provide the elasticsearch storage currently
-      discoveryProvider = require("../discovery-providers/elasticsearch");
-      break;
-    // Otherwise, we provide a dummy router for now
-    default:
-      discoveryProvider = require("../discovery-providers/none");
-  }
-  const apiRouter = createAPIRouter({
-    contents: contentsProvider.createContentsRouter(config.storage),
-    discovery: discoveryProvider.createDiscoveryRouter(config.discovery)
-  });
-
-  const router = express.Router();
-
-  router.use("/api", apiRouter);
-  router.use("/files", contentsProvider.createFilesRouter(config.storage));
-
-  return router;
-}
-
-// Keeping the singleton on the export to make it work in-place right now
-module.exports = createRouter(require("../config"));
diff --git a/backend/server.js b/backend/server.js
deleted file mode 100644
index 3f232bc8..00000000
--- a/backend/server.js
+++ /dev/null
@@ -1,93 +0,0 @@
-// @flow
-
-import type { $Request, $Response } from "express";
-
-const { parse } = require("url");
-const http = require("http");
-
-const express = require("express");
-const morgan = require("morgan");
-const log = require("log");
-
-const front = require("../frontend");
-
-const config = require("./config");
-
-function createServer() {
-  const frontend = front.createNextApp();
-
-  return frontend.app.prepare().then(() => {
-    const app = express();
-    app.use(morgan("common"));
-
-    log.info(`Node env: ${config.nodeEnv}`);
-
-    const router = require("./routes");
-
-    /**
-     *
-     * /view/ router
-     *
-     * On some filetypes it will serve the next.js app and on others it will
-     * redirect to the literal file. This is to allow for resolving relative
-     * assets like images within notebooks.
-     */
-    const suffixRegex = /(?:\.([^.]+))?$/;
-    const renderSuffixes = new Set(["ipynb", "html", "json", "md", "rmd"]);
-    const renderAccepts = new Set(["text/html", "application/xhtml+xml"]);
-    const viewHandler = (req: $Request, res: $Response) => {
-      const presuffix = suffixRegex.exec(req.path);
-
-      if (!presuffix) {
-        return null;
-      }
-
-      const suffix = (presuffix[1] || "").toLowerCase();
-      const accepts = (req.headers.accept || "").split(",");
-
-      if (
-        // If one of our suffixes is a renderable item
-        renderSuffixes.has(suffix) ||
-        // If the file is requested as `text/html` first and foremost, we'll also
-        // render our file viewer
-        renderAccepts.has(accepts[0]) ||
-        renderAccepts.has(accepts[1])
-      ) {
-        const { query } = parse(req.url, true);
-        const viewPath = req.params["0"] || "/";
-        const q = Object.assign({}, { viewPath }, query);
-        return frontend.app.render(req, res, "/view", q);
-      }
-
-      const newPath = req.path.replace(/^\/view/, "/files");
-      res.redirect(newPath);
-      return;
-    };
-
-    /**
-     * Express middleware for letting our next.js frontend do the handling
-     */
-    const passToNext = (req: $Request, res: $Response) => {
-      return frontend.handle(req, res);
-    };
-
-    router.get(["/view", "/view*"], viewHandler);
-    router.get("*", passToNext);
-
-    // TODO: Leaving this here for the eventual baseURL handling
-    const baseURI = "/";
-    app.use(baseURI, router);
-    // TODO: This is duplicate until we're doing proper baseURL handling
-    app.get(["/view", "/view*"], viewHandler);
-    app.use(passToNext);
-
-    const server = http.createServer(app);
-
-    return new Promise(accept => {
-      // $FlowFixMe
-      server.listen(config.port, () => accept(server));
-    });
-  });
-}
-
-module.exports = createServer;
diff --git a/backend/src/config.ts b/backend/src/config.ts
new file mode 100644
index 00000000..10052b9f
--- /dev/null
+++ b/backend/src/config.ts
@@ -0,0 +1,101 @@
+// The config object that gets passed around to instantiate the server
+export type Config = {
+  storageBackend: "s3" | "local";
+  themis_url: string | null;
+  port: string | number;
+  nodeEnv: string;
+  storage: {
+    [key: string]: any;
+  };
+  s3storage: S3StorageConfig;
+  clone_server_endpoint?: string;
+};
+
+export type S3StorageConfig = {
+  s3: {
+    params: {
+      // required s3 bucket name
+      Bucket: string;
+    };
+    // required key
+    accessKeyId?: string;
+    // required secret
+    secretAccessKey?: string;
+  };
+  artifactPrefix: string;
+  s3PathDelimiter: string;
+};
+
+function deprecate(env: NodeJS.ProcessEnv, oldVar: string, newVar: string) {
+  if (env[oldVar]) {
+    console.warn(`${oldVar} is deprecated, please use ${newVar}`);
+  }
+}
+
+function populateLocalStorageOptions(env: NodeJS.ProcessEnv) {
+  let baseDirectory = process.env.COMMUTER_LOCAL_STORAGE_BASEDIRECTORY;
+
+  if (!baseDirectory) {
+    baseDirectory = process.cwd();
+    console.warn("Running in the current working directory, ", baseDirectory);
+  }
+
+  return {
+    local: {
+      baseDirectory
+    }
+  };
+}
+
+function populateS3Options(env: NodeJS.ProcessEnv): S3StorageConfig {
+  deprecate(env, "COMMUTER_BASEPATH", "COMMUTER_S3_BASE_PREFIX");
+  deprecate(env, "COMMUTER_PATH_DELIMITER", "COMMUTER_S3_PATH_DELIMITER");
+
+  if (!env.COMMUTER_BUCKET) {
+    throw "S3 Bucket Name Missing";
+  }
+
+  const s3PathDelimiter =
+    env.COMMUTER_S3_PATH_DELIMITER || env.COMMUTER_PATH_DELIMITER || "/";
+
+  const config: S3StorageConfig = {
+    s3: {
+      params: {
+        // required s3 bucket name
+        Bucket: env.COMMUTER_BUCKET
+      },
+      // required key
+      accessKeyId: env.COMMUTER_S3_KEY,
+      // required secret
+      secretAccessKey: env.COMMUTER_S3_SECRET
+    },
+    artifactPrefix: env.COMMUTER_S3_ARTIFACT_PREFIX || "papermill",
+    s3PathDelimiter
+  };
+
+  return config;
+}
+
+function instantiate(): Config {
+  const storageBackend = (
+    process.env.COMMUTER_STORAGE_BACKEND || "local"
+  ).toLowerCase();
+
+  if (storageBackend !== "local" && storageBackend !== "s3") {
+    throw new Error(`Unknown storageBackend ${storageBackend}`);
+  }
+
+  const config: Config = {
+    port: process.env.PORT || process.env.COMMUTER_PORT || 4000,
+    nodeEnv: process.env.NODE_ENV || "test",
+    themis_url: process.env.THEMIS_URL || null,
+    storageBackend: storageBackend,
+    storage: populateLocalStorageOptions(process.env),
+    s3storage: populateS3Options(process.env),
+    clone_server_endpoint: process.env.CLONE_ENDPOINT || "http://localhost:8888"
+  };
+
+  return config;
+}
+
+module.exports = instantiate();
diff --git a/backend/content-providers/local/contents.js b/backend/src/content-providers/local/contents.ts
similarity index 64%
rename from backend/content-providers/local/contents.js
rename to backend/src/content-providers/local/contents.ts
index 429f2699..76e2751a 100644
--- a/backend/content-providers/local/contents.js
+++ b/backend/src/content-providers/local/contents.ts
@@ -1,14 +1,12 @@
-// @flow
-import type { $Request, $Response } from "express";
+import express = require("express");
 
-import type { DiskProviderOptions } from "./fs";
+import * as fs from "./fs";
 
-const express = require("express");
-
-const fs = require("./fs");
+import { DiskProviderOptions } from "./fs";
+import { ParamsDictionary } from "express-serve-static-core";
 
 type ErrorResponse = {
-  message: string
+  message: string;
 };
 
 export function createRouter(options: DiskProviderOptions) {
@@ -16,13 +14,14 @@ export function createRouter(options: DiskProviderOptions) {
     throw new Error("Base directory must be specified for the local provider");
   }
   const router = express.Router();
-  router.get("/*", (req: $Request, res: $Response) => {
-    const path = req.params["0"];
+  router.get("/*", (req: express.Request, res: express.Response) => {
+    const params = req.params as ParamsDictionary;
+    const path = params["0"];
     fs.get(options, path)
-      .then(content => {
+      .then((content: any) => {
         res.json(content);
       })
-      .catch((err: ErrnoError) => {
+      .catch((err: NodeJS.ErrnoException) => {
         const errorResponse: ErrorResponse = {
           message: `${err.message}: ${path}`
         };
@@ -41,11 +40,12 @@ export function createRouter(options: DiskProviderOptions) {
       res.status(500).json(errorResponse);
     });
   });
-  router.post("/*", (req: $Request, res: $Response) => {
-    const path = req.params["0"];
+  router.post("/*", (req: express.Request, res: express.Response) => {
+    const params = req.params as ParamsDictionary;
+    const path = params["0"];
     fs.post(options, path, req.body)
      .then(() => res.status(201).send())
-      .catch((err: ErrnoError) => {
+      .catch((err: NodeJS.ErrnoException) => {
        const errorResponse: ErrorResponse = {
          message: `${err.message}: ${path}`
        };
diff --git a/backend/content-providers/local/files.js b/backend/src/content-providers/local/files.ts
similarity index 65%
rename from backend/content-providers/local/files.js
rename to backend/src/content-providers/local/files.ts
index 812f4bb6..5d85fb3e 100644
--- a/backend/content-providers/local/files.js
+++ b/backend/src/content-providers/local/files.ts
@@ -1,17 +1,12 @@
-// @flow
-import type { $Request, $Response } from "express";
+import express = require("express");
 
-import type { DiskProviderOptions } from "./fs";
-
-const fs = require("fs");
-const path = require("path");
-
-const express = require("express");
-
-const sanitizeFilePath = require("./fs").sanitizeFilePath;
+import * as fs from "fs";
+import * as path from "path";
+import { sanitizeFilePath, DiskProviderOptions } from "./fs";
+import { ParamsDictionary } from "express-serve-static-core";
 
 type ErrorResponse = {
-  message: string
+  message: string;
 };
 
 export function createRouter(options: DiskProviderOptions) {
@@ -21,8 +16,9 @@ export function createRouter(options: DiskProviderOptions) {
 
   const router = express.Router();
 
-  router.get("/*", (req: $Request, res: $Response) => {
-    const unsafeFilePath = req.params["0"];
+  router.get("/*", (req: express.Request, res: express.Response) => {
+    const params = req.params as ParamsDictionary;
+    const unsafeFilePath = params["0"];
 
     const filePath = path.join(
       options.local.baseDirectory,
@@ -32,7 +28,7 @@ export function createRouter(options: DiskProviderOptions) {
 
     // Assume it's a file by default, fall to error handling otherwise
     const rs = fs.createReadStream(filePath);
 
-    rs.on("error", err => {
+    rs.on("error", (err: NodeJS.ErrnoException) => {
       const errorResponse: ErrorResponse = {
         message: `${err.message}: ${filePath}`
       };
diff --git a/backend/content-providers/local/fs.js b/backend/src/content-providers/local/fs.ts
similarity index 87%
rename from backend/content-providers/local/fs.js
rename to backend/src/content-providers/local/fs.ts
index 08ab4b2a..706c14b4 100644
--- a/backend/content-providers/local/fs.js
+++ b/backend/src/content-providers/local/fs.ts
@@ -1,17 +1,22 @@
-// @flow
-
 /**
  * Local storage provider for commuter
  */
-const path = require("path");
+import fs = require("fs-extra");
+import * as path from "path";
 
-const fs = require("fs-extra");
+import {
+  Content,
+  FileContent,
+  NotebookContent,
+  DirectoryContent,
+  ContentError
+} from "../../types";
 
 export type DiskProviderOptions = {
   local: {
-    baseDirectory: string
-  }
+    baseDirectory: string;
+  };
 };
 
 /**
@@ -28,20 +33,18 @@ function cleanBaseDir(s: string) {
 }
 
 function createContentResponse(
   parsedFilePath: {
-    root: string,
-    dir: string,
-    base: string,
-    ext: string,
-    name: string
+    root: string;
+    dir: string;
+    base: string;
+    ext: string;
+    name: string;
   },
-  // $FlowFixMe: fs-extra
   stat: fs.Stats
-): JupyterApi$Content {
+): Content {
   const name = cleanBaseDir(parsedFilePath.base);
   const filePath = cleanBaseDir(
     path.join(parsedFilePath.dir, parsedFilePath.base)
   );
-  // $FlowFixMe: fs-extra
   const writable = Boolean(fs.constants.W_OK & stat.mode);
   const created: Date = stat.birthtime;
   const last_modified = stat.mtime;
@@ -52,7 +55,7 @@ function createContentResponse(
       mimetype: null,
       format: "json",
      content: null,
-      writable,
+      writable: true,
      name: name === "." ? "" : name,
      path: filePath === "." ? "" : filePath,
      created,
@@ -66,7 +69,7 @@ function createContentResponse(
      mimetype: null,
      format: "json",
      content: null,
-      writable,
+      writable: true,
      name,
      path: filePath,
      created,
@@ -80,7 +83,7 @@ function createContentResponse(
      mimetype: null,
      format: "text",
      content: null,
-      writable,
+      writable: true,
      name,
      path: filePath,
      created,
@@ -96,7 +99,7 @@
 function createContentPromise(
   options: DiskProviderOptions,
   filePath: string
-): Promise<JupyterApi$Content> {
+): Promise<Content> {
   const fullPath = path.join(options.local.baseDirectory, filePath);
   const parsedFilePath = path.parse(filePath);
   return new Promise((resolve, reject) => {
@@ -131,11 +134,12 @@ export function sanitizeFilePath(unsafeFilePath: string): string {
 export function get(
   options: DiskProviderOptions,
   unsafeFilePath: string
-): Promise<JupyterApi$Content> {
+): Promise<Content | ContentError> {
   const filePath = sanitizeFilePath(unsafeFilePath);
   // TODO: filePath should be normalized
 
   const contentP = createContentPromise(options, filePath);
+  // @ts-ignore
   return contentP.then(content => {
     if (content.type === "directory") {
       return getDirectory(options, content);
@@ -149,6 +153,7 @@ export function get(
 
     return {
       reason: "Unsupported content",
+      // @ts-ignore
      message: `Unrecognized content type "${content.type}"`
    };
  });
@@ -157,7 +162,7 @@ export function get(
 export function post(
   options: DiskProviderOptions,
   unsafeFilePath: string,
-  content: mixed
+  content: any
 ) {
   const filePath = path.join(
     options.local.baseDirectory,
@@ -168,8 +173,8 @@ export function post(
 
 function getDirectory(
   options: DiskProviderOptions,
-  directory: JupyterApi$DirectoryContent
-): Promise<JupyterApi$Content> {
+  directory: DirectoryContent
+): Promise<Content> {
   return new Promise((resolve, reject) => {
     fs.readdir(
       path.join(options.local.baseDirectory, directory.path),
@@ -199,7 +204,6 @@ function getDirectory(
         );
 
         Promise.all(contentPromises)
-          // $FlowFixMe
          .then(contents =>
            contents.filter(x => !(x === null || x === undefined))
          )
@@ -213,11 +217,10 @@ function getDirectory(
 
 function getFile(
   options: DiskProviderOptions,
-  file: JupyterApi$FileContent
-): Promise<JupyterApi$FileContent> {
+  file: FileContent
+): Promise<FileContent> {
   return new Promise((resolve, reject) => {
     // TODO: Should we support a streaming interface or nah
-    // $FlowFixMe: fs-extra
     fs.readFile(
       path.join(options.local.baseDirectory, file.path),
       (err, data) => {
@@ -232,10 +235,7 @@ function getFile(
             // We will not send the content, as the interface
             // currently doesn't render it. But this is a bad
             // contract.
-            //
-            // We denote the format as null rather than some strange format since we need to
-            // stay spec compliant with jupyter
-            format = null;
+            format = "binary";
             str = "";
             break;
           }
@@ -248,11 +248,10 @@ function getFile(
 
 function getNotebook(
   options: DiskProviderOptions,
-  notebook: JupyterApi$NotebookContent
-): Promise<JupyterApi$NotebookContent> {
+  notebook: NotebookContent
+): Promise<NotebookContent> {
   return new Promise((resolve, reject) => {
     // TODO: Should we support a streaming interface or nah
-    // $FlowFixMe: fs-extra
     fs.readFile(
       path.join(options.local.baseDirectory, notebook.path),
       (err, data) => {
diff --git a/backend/content-providers/gcs/index.js b/backend/src/content-providers/local/index.ts
similarity index 94%
rename from backend/content-providers/gcs/index.js
rename to backend/src/content-providers/local/index.ts
index 3d7cd159..bf7f34db 100644
--- a/backend/content-providers/gcs/index.js
+++ b/backend/src/content-providers/local/index.ts
@@ -1,5 +1,3 @@
-// @flow
-
 import { createRouter as createContentsRouter } from "./contents";
 import { createRouter as createFilesRouter } from "./files";
 
diff --git a/backend/src/index.ts b/backend/src/index.ts
new file mode 100644
index 00000000..a90813b9
--- /dev/null
+++ b/backend/src/index.ts
@@ -0,0 +1,22 @@
+import { createServer } from "./server";
+
+import * as http from "http";
+
+const Log = require("log"),
+  log = new Log("info");
+
+createServer()
+  .then((server: http.Server) => {
+    const address = server.address();
+
+    if (typeof address === "string") {
+      log.info("Commuter server listening at ", address);
+    } else {
+      log.info("Commuter server listening on port ", address!.port);
+    }
+  })
+  .catch((e: Error) => {
+    console.error(e);
+    console.error(e.stack);
+    process.exit(-10);
+  });
diff --git a/backend/src/routes/api/index.ts b/backend/src/routes/api/index.ts
new file mode 100644
index 00000000..ac6ed980
--- /dev/null
+++ b/backend/src/routes/api/index.ts
@@ -0,0 +1,83 @@
+import express = require("express");
+
+import bodyParser = require("body-parser");
+
+// Sadly because of the singleton + export we're stuck with a classic require here
+const config = require("../../config");
+
+const defaultContentTypeMiddleware = (
+  req: express.Request,
+  res: express.Response,
+  next: express.NextFunction
+) => {
+  req.headers["content-type"] =
+    req.headers["content-type"] || "application/json";
+  next();
+};
+
+type APIRouters = {
+  contents: express.Router;
+  s3contents: express.Router;
+};
+
+function createAPIRouter(api: APIRouters): express.Router {
+  const router = express.Router();
+  router.use(defaultContentTypeMiddleware);
+  router.use(bodyParser.json({ limit: "50mb" })); //50mb is the current threshold
+  router.use(bodyParser.urlencoded({ extended: true }));
+
+  router.use("/ping", (req: express.Request, res: express.Response) => {
+    res.json({ message: "pong" });
+  });
+
+  router.use("/contents", api.contents);
+  router.use("/s3-contents", api.s3contents);
+  router.use("/s3-clone", function(req, res) {
+    const { s3Bucket, s3Key, versionId } = req.body;
+    if (config.clone_server_endpoint === null) {
+      res.status(400).json({
+        message:
+          "No backend configured for cloning. /api/s3-clone is not available"
+      });
+      return;
+    }
+    let notebookServerEndpoint = config.clone_server_endpoint;
+    const bookstoreCloneEndpoint = "/bookstore/clone";
+    let bduser = req.headers["bd-preferred-username"];
+    if (bduser) {
+      notebookServerEndpoint += `/${bduser}/ipynb`;
+    }
+    const queryString = `?s3_bucket=${s3Bucket}&s3_key=${s3Key}${
"&s3_version_id=" + versionId : "" + }`; + res.json({ + url: `${notebookServerEndpoint}${bookstoreCloneEndpoint}${queryString}` + }); + + return; + }); + router.use("/clone", function(req, res) { + const { relpath } = req.body; + if (config.clone_server_endpoint === null) { + res.status(400).json({ + message: + "No backend configured for cloning. /api/clone is not available" + }); + return; + } + let notebookServerEndpoint = config.clone_server_endpoint; + const bookstoreCloneEndpoint = "/bookstore/fs-clone"; + let bduser = req.headers["bd-preferred-username"]; + if (bduser) { + notebookServerEndpoint += `/${bduser}/ipynb`; + } + res.json({ + url: `${notebookServerEndpoint}${bookstoreCloneEndpoint}?relpath=${relpath}` + }); + + return; + }); + return router; +} + +module.exports = createAPIRouter; diff --git a/backend/src/routes/api/s3/contents.ts b/backend/src/routes/api/s3/contents.ts new file mode 100644 index 00000000..d1695c58 --- /dev/null +++ b/backend/src/routes/api/s3/contents.ts @@ -0,0 +1,193 @@ +import express = require("express"); +import S3 = require("aws-sdk/clients/s3"); + +import { chain } from "lodash"; + +import { S3StorageConfig } from "../../../config"; +import { ParamsDictionary } from "express-serve-static-core"; + +const isDir = (path?: string | null) => !path || (path && path.endsWith("/")); + +const errObject = (err: NodeJS.ErrnoException, path: string) => ({ + message: `${err.message}: ${path}`, + reason: err.code +}); + +function createRouter(config: S3StorageConfig): express.Router { + const router = express.Router(); + const s3 = new S3(); + + const fileName = (path: string | undefined): string => + chain(path) + .trimEnd("/") + .split(config.s3PathDelimiter) + .last() + .value() || ""; + const isNotebook = (s3data: any) => + s3data.Key && s3data.Key.endsWith("ipynb"); + + const dirObject = (bucket: string) => (data: S3.CommonPrefix) => ({ + name: fileName(data.Prefix), + path: `${bucket}/${data.Prefix}`, + type: "directory", + writable: true, + created: null, + last_modified: null, + mimetype: null, + content: null, + format: null + }); + + const fileObject = (bucket: string) => (data: { + Key?: string; + LastModified?: Date; + }) => { + return { + name: fileName(data.Key), + path: `${bucket}/${data.Key}`, + type: isNotebook(data) ? 
"notebook" : "file", + writable: true, + created: null, + last_modified: data.LastModified, + mimetype: null, + content: null, + format: null + }; + }; + + router.get( + ["/versions/:bucket/*"], + + (req: express.Request, res: express.Response) => { + const params = req.params as ParamsDictionary; + const path = params["0"]; + const cb = (err: NodeJS.ErrnoException, data: any) => { + if (err) res.status(500).json(errObject(err, path)); + else res.json(data); + }; + const listObjectVersions = (path: string, callback: Function) => { + const S3Params = { + Bucket: params.bucket, + Prefix: path, + Delimiter: config.s3PathDelimiter, + MaxKeys: 2147483647 + }; + s3.listObjectVersions( + S3Params as S3.ListObjectVersionsRequest, + (err, data) => { + if (err || !data) { + callback(err); + return; + } + if (!data.Versions) { + callback(new Error("Missing versions from S3 Response")); + return; + } + callback(null, data.Versions); + } + ); + }; + listObjectVersions(path, cb); + } + ); + + router.get( + ["/:bucket/*", "/:bucket"], + (req: express.Request, res: express.Response) => { + const params = req.params as ParamsDictionary; + const path = params["0"]; + const cb = (err: NodeJS.ErrnoException, data: any) => { + if (err) res.status(500).json(errObject(err, path)); + else res.json(data); + }; + + const listObjects = (path: string, callback: Function) => { + const S3Params = { + Bucket: params.bucket, + Prefix: path, + Delimiter: config.s3PathDelimiter, // Maximum allowed by S3 API + MaxKeys: 2147483647, //remove the folder name from listing + StartAfter: path + }; + s3.listObjectsV2(S3Params as S3.ListObjectsV2Request, (err, data) => { + if (err || !data) { + callback(err); + return; + } + if (!data.Contents) { + callback(new Error("Missing contents from S3 Response")); + return; + } + if (!data.CommonPrefixes) { + callback(new Error("Missing CommonPrefixes from S3 Response")); + return; + } + const files = data.Contents.map(fileObject(params.bucket)); + const dirs = data.CommonPrefixes.map(dirObject(params.bucket)); + callback(null, { + name: fileName(path), + path: path, + type: "directory", + writable: true, + created: null, + last_modified: null, + mimetype: null, + content: [...files, ...dirs], + format: "json" + }); + }); + }; + + const getObject = (path: string, callback: Function) => { + const S3Params = { + Bucket: params.bucket, + Key: path, + ...(req.query.VersionId && { VersionId: req.query.VersionId }) + }; + s3.getObject(S3Params as S3.GetObjectRequest, (err, data) => { + if (err) { + callback(err); + return; + } else { + // The Key does not exist on getObject, it's expected to use the path above + const s3Response = Object.assign({ Body: "" }, data, S3Params); + let content = s3Response.Body.toString(); + if (isNotebook(s3Response)) { + try { + content = JSON.parse(content); + } catch (err) { + callback(err); + return; + } + } // Notebook files end up as pure json // All other files end up as pure strings in the content field + const file = Object.assign( + {}, + fileObject(params.bucket)(s3Response), + { + content + } + ); + callback(null, file); + } + }); + }; + + if (isDir(path)) listObjects(path, cb); + else { + getObject(path, (err: NodeJS.ErrnoException, data: any) => { + if (err && err.code === "NoSuchKey") { + listObjects(path.replace(/\/?$/, "/"), cb); + return; + } + + if (err) res.status(500).json(errObject(err, path)); + else res.json(data); + }); + } + } + ); + + return router; +} + +export { createRouter, isDir }; diff --git a/backend/src/routes/api/s3/files.ts 
diff --git a/backend/src/routes/api/s3/files.ts b/backend/src/routes/api/s3/files.ts
new file mode 100644
index 00000000..aa065b44
--- /dev/null
+++ b/backend/src/routes/api/s3/files.ts
@@ -0,0 +1,256 @@
+import express = require("express");
+
+import { Request, Response } from "express";
+
+import { ParamsDictionary } from "express-serve-static-core";
+import { Config } from "../../../config";
+// TODO: we need to update the following dependencies to use types
+import AWS = require("aws-sdk");
+// @ts-ignore
+import pick = require("lodash.pick");
+// @ts-ignore
+import trim = require("lodash.trim");
+// @ts-ignore
+import map = require("lodash.map");
+// @ts-ignore
+import isEmpty = require("lodash.isempty");
+// @ts-ignore
+import reject = require("lodash.reject");
+// @ts-ignore
+import assign = require("lodash.assign");
+var awsConfig = require("aws-config");
+// @ts-ignore
+import urljoin = require("url-join");
+// @ts-ignore
+import mime = require("mime");
+// @ts-ignore
+import base64 = require("base64-stream");
+// @ts-ignore
+var debug = require("debug")("s3-proxy");
+
+// require("simple-errors");
+
+// HTTP headers from the AWS request to forward along
+var awsForwardHeaders = [
+  "content-type",
+  "last-modified",
+  "etag",
+  "cache-control",
+];
+
+const s3Proxy = function (options: any) {
+  var s3 = new AWS.S3(
+    assign(awsConfig(options), pick(options, "endpoint", "s3ForcePathStyle"))
+  );
+
+  function listKeys(req: Request, res: Response, next: Function) {
+    var folderPath = req.originalUrl.substr(req.baseUrl.length);
+
+    var s3Params = {
+      Bucket: options.bucket,
+      Prefix: options.prefix ? urljoin(options.prefix, folderPath) : folderPath,
+    };
+    interface RichS3ProxyError extends Error {
+      prefix?: string;
+      bucket?: string;
+      inner?: Error;
+    }
+
+    debug("list s3 keys at", s3Params.Prefix);
+    s3.listObjects(s3Params, function (err, data) {
+      if (err) {
+        return next(function () {
+          const richErr: RichS3ProxyError = new Error("Could not read S3 keys");
+          richErr.prefix = s3Params.Prefix;
+          richErr.bucket = s3Params.Bucket;
+          richErr.inner = err;
+          return richErr;
+        });
+      }
+
+      var keys: string[] = [];
+      map(data.Contents, "Key").forEach(function (key: string) {
+        // Chop off the prefix path
+        if (key !== s3Params.Prefix) {
+          if (isEmpty(s3Params.Prefix)) {
+            keys.push(key);
+          } else {
+            keys.push(key.substr(s3Params.Prefix.length));
+          }
+        }
+      });
+
+      res.json(keys);
+    });
+  }
+
+  function getObject(req: Request, res: Response, next: Function) {
+    // This will get everything in the path following the mountpath
+    var s3Key = decodeURIComponent(
+      req.originalUrl.substr(req.baseUrl.length + 1)
+    );
+
+    // If the key is empty (this occurs if a request comes in for a url ending in '/'), and there is a defaultKey
+    // option present on options, use the default key
+    // E.g. if someone wants to route '/' to '/index.html'
+    if (s3Key === "" && options.defaultKey) s3Key = options.defaultKey;
+
+    // Chop off the querystring, it causes problems with SDK.
+    var queryIndex = s3Key.indexOf("?");
+    if (queryIndex !== -1) {
+      s3Key = s3Key.substr(0, queryIndex);
+    }
+
+    // Strip out any path segments that start with a double dash '--'. This is just used
+    // to force a cache invalidation.
+    s3Key = reject(s3Key.split("/"), function (segment: string) {
+      return segment.slice(0, 2) === "--";
+    }).join("/");
+
+    var s3Params: {
+      Bucket: string;
+      Key: string;
+      IfNoneMatch?: string;
+      VersionId?: string;
+    };
+    s3Params = {
+      Bucket: options.bucket,
diff --git a/backend/content-providers/local/index.js b/backend/src/routes/api/s3/index.ts
similarity index 94%
rename from backend/content-providers/local/index.js
rename to backend/src/routes/api/s3/index.ts
index 3d7cd159..bf7f34db 100644
--- a/backend/content-providers/local/index.js
+++ b/backend/src/routes/api/s3/index.ts
@@ -1,5 +1,3 @@
-// @flow
-
 import { createRouter as createContentsRouter } from "./contents";
 import { createRouter as createFilesRouter } from "./files";
diff --git a/backend/src/routes/index.ts b/backend/src/routes/index.ts
new file mode 100644
index 00000000..63602e20
--- /dev/null
+++ b/backend/src/routes/index.ts
@@ -0,0 +1,29 @@
+import express = require("express");
+
+import { Config } from "../config";
+
+const createAPIRouter = require("./api");
+
+function createRouter(config: Config): express.Router {
+  const contentsProvider = require("../content-providers/local");
+  const s3ContentsProvider = require("./api/s3");
+
+  const apiRouter = createAPIRouter({
+    contents: contentsProvider.createContentsRouter(config.storage),
+    s3contents: s3ContentsProvider.createContentsRouter(config.s3storage)
+  });
+
+  const router = express.Router();
+
+  router.use("/api", apiRouter);
+  router.use("/files", contentsProvider.createFilesRouter(config.storage));
+  router.use(
+    "/s3-files",
+    s3ContentsProvider.createFilesRouter(config.s3storage)
+  );
+
+  return router;
+}
+
+// Keeping the singleton on the export to make it work in-place right now
+module.exports = createRouter(require("../config"));
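Reviewer sketch of consuming the singleton export above. In the real app, server.ts (below) mounts this router inside createServer(); the standalone app and port here are illustrative only:

// Sketch only: the URL space produced by routes/index.ts.
import express = require("express");

const app = express();
app.use("/", require("./routes")); // module export is already bound to ../config

// /api/...      -> contents APIs (local + s3), exact subpaths live in ./api
// /files/...    -> local file serving
// /s3-files/... -> the S3 proxy from files.ts above
app.listen(4000);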
diff --git a/backend/src/server.ts b/backend/src/server.ts
new file mode 100644
index 00000000..89f46fc2
--- /dev/null
+++ b/backend/src/server.ts
@@ -0,0 +1,114 @@
+import express from "express";
+
+import { createNextApp } from "../../frontend";
+
+import { parse } from "url";
+
+import * as http from "http";
+
+const path = require("path"),
+  morgan = require("morgan"),
+  config = require("./config"),
+  Log = require("log"),
+  log = new Log("info");
+
+import { ParamsDictionary } from "express-serve-static-core";
+
+export async function createServer(): Promise<http.Server> {
+  const frontend = createNextApp();
+
+  // Wait for the next.js handlers to be ready
+  await frontend.prepare();
+
+  const app = express();
+  app.use(morgan("common"));
+
+  log.info(`Node env: ${config.nodeEnv}`);
+
+  const router = require("./routes");
+
+  /**
+   * /view/ router
+   *
+   * On some filetypes it will serve the next.js app and on others it will
+   * redirect to the literal file. This is to allow for resolving relative
+   * assets like images within notebooks.
+   */
+  const suffixRegex = /(?:\.([^.]+))?$/;
+  const renderSuffixes = new Set(["ipynb", "html", "json", "md", "rmd"]);
+  const renderAccepts = new Set(["text/html", "application/xhtml+xml"]);
+  const generateViewHandler = (handlerPath: string, fallbackPath: string) => {
+    const viewHandler = (req: express.Request, res: express.Response) => {
+      const presuffix = suffixRegex.exec(req.path);
+
+      if (!presuffix) {
+        return null;
+      }
+
+      const suffix = (presuffix[1] || "").toLowerCase();
+      const accepts = (req.headers.accept || "").split(",");
+
+      if (
+        // If one of our suffixes is a renderable item
+        renderSuffixes.has(suffix) ||
+        // If the file is requested as `text/html` first and foremost, we'll also
+        // render our file viewer
+        renderAccepts.has(accepts[0]) ||
+        renderAccepts.has(accepts[1])
+      ) {
+        const params = req.params as ParamsDictionary;
+        const { pathname, query } = parse(req.url, true);
+        const viewPath = params["0"] || "/";
+        const q = Object.assign({}, { viewPath }, query);
+        return frontend.render(req, res, `/${handlerPath}`, q);
+      }
+
+      const newPath = req.path.replace(
+        RegExp(`^/${handlerPath}`),
+        `/${fallbackPath}`
+      );
+      res.redirect(newPath);
+      return;
+    };
+    return viewHandler;
+  };
+
+  /**
+   * Express middleware for letting our next.js frontend do the handling
+   */
+  const passToNext = (req: express.Request, res: express.Response) => {
+    return frontend.getRequestHandler()(req, res);
+  };
+  const viewHandler = generateViewHandler("view", "files");
+  const s3ViewHandler = generateViewHandler("s3-view", "s3-files");
+  const s3BasePathRedirect = (req: express.Request, res: express.Response) => {
+    res.redirect(`/s3-view/${config.s3storage.s3.params.Bucket}`);
+  };
+  const s3ArtifactRedirectHandler = (
+    req: express.Request,
+    res: express.Response
+  ) => {
+    res.redirect(
+      `/s3-view/${config.s3storage.s3.params.Bucket}/${config.s3storage.artifactPrefix}`
+    );
+  };
+
+  router.get(["/s3-view", "/s3-view/"], s3BasePathRedirect);
+  router.get(["/s3-artifacts", "/s3-artifacts/"], s3ArtifactRedirectHandler);
+
+  router.get(["/view", "/view*"], viewHandler);
+  router.get(["/s3-view*"], s3ViewHandler);
+  router.get("*", passToNext);
+
+  // TODO: Leaving this here for the eventual baseURL handling
+  const baseURI = "/";
+  app.use(baseURI, router);
+  app.use(passToNext);
+
+  const server = http.createServer(app);
+
+  return new Promise(accept => {
+    server.listen(config.port, () => accept(server));
+  });
+}
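To make the suffix/Accept interplay in the view handler concrete, here is the same decision extracted as a pure function, with invented inputs; this mirrors the checks above rather than adding behavior:

// Sketch only: the render-vs-redirect decision the /view handler makes.
const suffixRegex = /(?:\.([^.]+))?$/;
const renderSuffixes = new Set(["ipynb", "html", "json", "md", "rmd"]);
const renderAccepts = new Set(["text/html", "application/xhtml+xml"]);

function shouldRender(reqPath: string, acceptHeader: string): boolean {
  const suffix = ((suffixRegex.exec(reqPath) || [])[1] || "").toLowerCase();
  const accepts = acceptHeader.split(",");
  return (
    renderSuffixes.has(suffix) ||
    renderAccepts.has(accepts[0]) ||
    renderAccepts.has(accepts[1])
  );
}

shouldRender("/view/nb.ipynb", "*/*");           // true: renderable suffix
shouldRender("/view/data.csv", "text/html,*/*"); // true: browser navigation
shouldRender("/view/data.csv", "*/*");           // false: 302 to /files/data.csv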
diff --git a/backend/src/types.ts b/backend/src/types.ts
new file mode 100644
index 00000000..808d9342
--- /dev/null
+++ b/backend/src/types.ts
@@ -0,0 +1,48 @@
+export type ContentError = {
+  reason: string;
+  message: string;
+};
+
+export type DirectoryContent = {
+  type: "directory";
+  mimetype: null;
+  content: null | Array<Content>; // Technically content-free content ;)
+
+  name: string;
+  path: string;
+
+  created: Date;
+  last_modified: Date;
+  writable: boolean;
+  format: "json";
+};
+
+export type NotebookContent = {
+  type: "notebook";
+  mimetype: null;
+  content: null | Object;
+
+  name: string;
+  path: string;
+
+  created: Date;
+  last_modified: Date;
+  writable: boolean;
+  format: "json";
+};
+
+export type FileContent = {
+  type: "file";
+  mimetype: null | string;
+  content: null | string;
+
+  name: string;
+  path: string;
+
+  created: Date;
+  last_modified: Date;
+  writable: boolean;
+  // binary is not technically part of the Jupyter API, though some old
+  // commuter code returns this
+  format: null | "text" | "base64" | "binary";
+};
+
+export type Content = DirectoryContent | FileContent | NotebookContent;
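For reference, a value satisfying FileContent, with invented field values. One reviewer note: the S3 contents router above returns created: null and last_modified: null for directories, which these Date-typed fields do not appear to admit, so the types look aspirational rather than enforced end to end:

// Sketch only: an example FileContent value.
import { FileContent } from "./types";

const example: FileContent = {
  type: "file",
  mimetype: "text/markdown",
  content: "# Hello commuter",
  name: "README.md",
  path: "notes/README.md",
  created: new Date("2019-01-01T00:00:00Z"),
  last_modified: new Date("2019-01-02T00:00:00Z"),
  writable: true,
  format: "text"
};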
diff --git a/backend/tsconfig.json b/backend/tsconfig.json
new file mode 100644
index 00000000..da351acd
--- /dev/null
+++ b/backend/tsconfig.json
@@ -0,0 +1,20 @@
+{
+  "include": ["src/**/*"],
+  "exclude": ["dist"],
+  "compilerOptions": {
+    "allowSyntheticDefaultImports": true,
+    "module": "commonjs",
+    "esModuleInterop": true,
+    "target": "es6",
+    "noImplicitAny": true,
+    "moduleResolution": "node",
+    "sourceMap": true,
+    "lib": ["es2015"],
+    "rootDir": "src",
+    "outDir": "dist",
+    "strict": true,
+    "declaration": true,
+    "inlineSources": true,
+    "types": ["node"]
+  }
+}
diff --git a/bin.js b/bin.js
old mode 100755
new mode 100644
index 052112c2..e14c999e
--- a/bin.js
+++ b/bin.js
@@ -2,4 +2,4 @@
 process.env.NODE_ENV = "production";
 
-require("./lib/index.js");
+require("./backend/dist/index.js");
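The tsconfig's rootDir/outDir pair explains the new bin.js require path: backend/src compiles to backend/dist. The entrypoint itself is not shown in this section; a plausible sketch, assuming only the createServer export from server.ts above, would be:

// Hypothetical backend/src/index.ts: not part of this diff, sketched from
// the createServer() export in server.ts above.
import { createServer } from "./server";

createServer()
  .then(server => {
    // server is the http.Server resolved once listen() succeeds
  })
  .catch(err => {
    console.error(err);
    process.exit(1);
  });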
diff --git a/components/browse-header.js b/components/browse-header.js
deleted file mode 100644
index 90130d04..00000000
--- a/components/browse-header.js
+++ /dev/null
@@ -1,172 +0,0 @@
-// @flow
-import * as React from "react";
-import Router from "next/router";
-import NextLink from "next/link";
-import { trim } from "lodash";
-
-import { theme } from "../theme";
-
-// Convert simple links to next style href + as
-const Link = ({ to, children, basepath }) => (
-
-    {children}
-
-);
-
-class BrowseHeader extends React.Component<*> {
-  props: {
-    path: string,
-    basepath: string,
-    type: string,
-    commuterExecuteLink: ?string
-  };
-
-  static defaultProps = {
-    active: "view"
-  };
-
-  handleItemClick = (e: SyntheticEvent<*>, { name }: { name: string }) => {
-    Router.push(name);
-  };
-
-  render() {
-    const { path, basepath } = this.props;
-    let paths = trim(path, "/").split("/");
-    // Empty path to start off
-    if (paths.length === 1 && paths[0] === "") {
-      paths = [];
-    }
-
-    // TODO: Ensure this works under an app subpath (which is not implemented yet)
-    const filePath = basepath.replace(/view\/?/, "files/") + path;
-
-    // const serverSide = typeof document === "undefined";
-    const viewingNotebook = filePath.endsWith(".ipynb");
-
-    return (
-
-    );
-  }
-}
-
-export default BrowseHeader;
diff --git a/components/contents/html.js b/components/contents/html.js
deleted file mode 100644
index a99e51fc..00000000
--- a/components/contents/html.js
+++ /dev/null
@@ -1,41 +0,0 @@
-// @flow
-import * as React from "react";
-
-export default class HTMLView extends React.Component<*> {
-  ifr: ?HTMLIFrameElement;
-
-  shouldComponentUpdate() {
-    return false;
-  }
-
-  render() {
-    return (
-