Skip to content

Commit

Permalink
Merge branch 'charon-grab-bag'
Browse files Browse the repository at this point in the history
  • Loading branch information
tsibley committed Apr 9, 2021
2 parents 7cf226d + 10e081f commit d733806
Show file tree
Hide file tree
Showing 5 changed files with 83 additions and 127 deletions.
4 changes: 2 additions & 2 deletions docs/api.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ Each handler is defined in a file of the same name within `src`.

### Authorization

Each handler is responsable for checking authorization by calling a `Source` class method like so:
Each handler is responsible for checking authorization by calling a `Source` class method like so:
```js
if (!source.visibleToUser(req.user)) {
return helpers.unauthorized(req, res);
Expand All @@ -24,4 +24,4 @@ if (!source.visibleToUser(req.user)) {
## Tests

There are a number of smoke-tests for these API calls.
See `tests/smoke-test/` for details and run via `npm run smoke-test:ci`
See `tests/smoke-test/` for details and run via `npm run smoke-test:ci`
45 changes: 27 additions & 18 deletions src/getDataset.js
Original file line number Diff line number Diff line change
Expand Up @@ -55,20 +55,25 @@ const requestV1Dataset = async (metaJsonUrl, treeJsonUrl) => {
* If neither the v1 nor the v2 dataset fetch / parse is successful,
* then the promise will reject.
*/
const requestMainDataset = async (res, fetchUrls) => {
const requestMainDataset = async (res, dataset) => {
const main = dataset.urlFor("main");

return new Promise((resolve, reject) => {
/* try to stream the (v2+) dataset JSON as the response */
const req = request
.get(fetchUrls.main)
.get(main)
.on('error', (err) => reject(err))
.on("response", async (response) => { // eslint-disable-line consistent-return
if (response.statusCode === 200) {
utils.verbose(`Successfully streaming ${fetchUrls.main}.`);
utils.verbose(`Successfully streaming ${main}.`);
req.pipe(res);
return resolve();
}
utils.verbose(`The request for ${fetchUrls.main} returned ${response.statusCode}.`);
const [success, dataToReturn] = await requestV1Dataset(fetchUrls.meta, fetchUrls.tree);
utils.verbose(`The request for ${main} returned ${response.statusCode}.`);

const meta = dataset.urlFor("meta");
const tree = dataset.urlFor("tree");
const [success, dataToReturn] = await requestV1Dataset(meta, tree);
if (success) {
res.send(dataToReturn);
return resolve();
Expand Down Expand Up @@ -132,37 +137,41 @@ const getDataset = async (req, res) => {
return res.status(400).send(`Couldn't parse the url "${query.prefix}"`);
}

const {source, dataset, fetchUrls, auspiceDisplayUrl} = datasetInfo;
const {source, dataset, resolvedPrefix} = datasetInfo;

// Authorization
if (!source.visibleToUser(req.user)) {
return helpers.unauthorized(req, res);
}

const baseUrl = req.url.split(query.prefix)[0];
let redirectUrl = baseUrl + '/' + auspiceDisplayUrl;
if (query.type) {
redirectUrl += `&type=${query.type}`;
}
/* If we got a partial prefix and resolved it into a full one, redirect to
* that. Auspice will notice and update its displayed URL appropriately.
*/
if (resolvedPrefix !== helpers.canonicalizePrefix(query.prefix)) {
// An absolute base is required but we won't use it, so use something bogus.
const resolvedUrl = new URL(req.originalUrl, "http://x");
resolvedUrl.searchParams.set("prefix", resolvedPrefix);

const relativeResolvedUrl = resolvedUrl.pathname + resolvedUrl.search;

if (redirectUrl !== req.url) {
utils.log(`Redirecting client to: ${redirectUrl}`);
res.redirect(redirectUrl);
utils.log(`Redirecting client to resolved dataset URL: ${relativeResolvedUrl}`);
res.redirect(relativeResolvedUrl);
return undefined;
}

if (fetchUrls.additional) {
if (query.type) {
const url = dataset.urlFor(query.type);
try {
await requestCertainFileType(res, req, fetchUrls.additional, query);
await requestCertainFileType(res, req, url, query);
} catch (err) {
if (err instanceof ResourceNotFoundError) {
return res.status(404).send("The requested dataset does not exist.");
}
return helpers.handle500Error(res, `Couldn't fetch JSON: ${fetchUrls.additional}`, err.message);
return helpers.handle500Error(res, `Couldn't fetch JSON: ${url}`, err.message);
}
} else {
try {
await requestMainDataset(res, fetchUrls);
await requestMainDataset(res, dataset);
} catch (err) {
if (dataset.isRequestValidWithoutDataset) {
utils.verbose("Request is valid, but no dataset available. Returning 204.");
Expand Down
95 changes: 27 additions & 68 deletions src/getDatasetHelpers.js
Original file line number Diff line number Diff line change
Expand Up @@ -37,17 +37,13 @@ const splitPrefixIntoParts = (prefix) => {
switch (prefixParts[0]) {
case "community":
case "staging":
case "fetch":
sourceName = prefixParts.shift();
break;
case "groups":
prefixParts.shift();
sourceName = prefixParts.shift();
break;
case "fetch":
/* the `/fetch/` URLs are backed by the `UrlDefinedSource` as `FetchSource` was too confusing */
prefixParts.shift();
sourceName = "urlDefined";
break;
default:
sourceName = "core";
break;
Expand All @@ -68,6 +64,11 @@ const splitPrefixIntoParts = (prefix) => {
source = new Source(...prefixParts.splice(0, 2));
break;

// UrlDefined source requires a URL authority part
case "fetch":
source = new Source(prefixParts.shift());
break;

default:
source = new Source();
}
Expand All @@ -90,11 +91,9 @@ const joinPartsIntoPrefix = ({source, prefixParts, isNarrative = false}) => {
break;
case "community":
case "staging":
case "fetch":
leadingParts.push(source.name);
break;
case "urlDefined":
leadingParts.push("fetch");
break;
default:
leadingParts.push("groups", source.name);
}
Expand All @@ -108,6 +107,12 @@ const joinPartsIntoPrefix = ({source, prefixParts, isNarrative = false}) => {
case "community":
leadingParts.push(source.owner, source.repoNameWithBranch);
break;

// UrlDefined source requires a URL authority part
case "fetch":
leadingParts.push(source.authority);
break;

// no default
}

Expand Down Expand Up @@ -153,84 +158,38 @@ const correctPrefixFromAvailable = (sourceName, prefixParts) => {
};


const guessTreeName = (prefixParts) => {
const guesses = ["HA", "NA", "PB1", "PB2", "PA", "NP", "NS", "MP", "L", "S"];
for (const part of prefixParts) {
if (guesses.indexOf(part.toUpperCase()) !== -1) return part;
}
return undefined;
};

/* Parse the prefix (normally URL) and decide which URLs to fetch etc
* The prefix is case sensitive
/* Parse the prefix (a path-like string specifying a source + dataset path)
* with resolving of partial prefixes. Prefixes are case-sensitive.
*/
const parsePrefix = (prefix, otherQueries) => {
const fetchUrls = {};
const parsePrefix = (prefix) => {
let {source, prefixParts} = splitPrefixIntoParts(prefix);

/* Does the URL specify two trees?
*
* If so, we need to extract the two tree names and massage the prefixParts
* to only include the first.
*/
let treeName, secondTreeName;
const treeSplitChar = /(?<!http[s]?):/;
for (let i=0; i<prefixParts.length; i++) {
if (prefixParts[i].search(treeSplitChar) !== -1) {
[treeName, secondTreeName] = prefixParts[i].split(treeSplitChar);
prefixParts[i] = treeName; // only use the first tree from now on
break;
}
}
if (!secondTreeName && otherQueries.deprecatedSecondTree) {
secondTreeName = otherQueries.deprecatedSecondTree;
}

// Expand partial prefixes. This would be cleaner if integrated into the
// Source classes.
prefixParts = correctPrefixFromAvailable(source.name, prefixParts);

if (!treeName) {
utils.verbose("Guessing tree name -- this should be improved");
treeName = guessTreeName(prefixParts);
}

// The URL to be displayed in Auspice, tweaked below if necessary
let auspiceDisplayUrl = joinPartsIntoPrefix({source, prefixParts});
// The resolved prefix, possibly "corrected" above, which we want to use for
// display.
const resolvedPrefix = joinPartsIntoPrefix({source, prefixParts});

// Get the server fetch URLs
const dataset = source.dataset(prefixParts);

fetchUrls.main = dataset.urlFor("main");
fetchUrls.tree = dataset.urlFor("tree");
fetchUrls.meta = dataset.urlFor("meta");
return ({source, dataset, resolvedPrefix});

if (secondTreeName) {
const idxOfTree = prefixParts.indexOf(treeName);
const secondTreePrefixParts = prefixParts.slice();
secondTreePrefixParts[idxOfTree] = secondTreeName;

const secondDataset = source.dataset(secondTreePrefixParts);
fetchUrls.secondTree = secondDataset.urlFor("tree");

const re = new RegExp(`\\/${treeName}(/|$)`); // note the double escape for special char
auspiceDisplayUrl = auspiceDisplayUrl.replace(re, `/${treeName}:${secondTreeName}/`);
}
auspiceDisplayUrl = auspiceDisplayUrl.replace(/\/$/, ''); // remove any trailing slash

if (otherQueries.type) {
fetchUrls.additional = dataset.urlFor(otherQueries.type);
}
};

return ({fetchUrls, auspiceDisplayUrl, treeName, secondTreeName, source, dataset});

};
/* Round-trip prefix through split/join to canonicalize it for comparison.
*/
const canonicalizePrefix = (prefix) =>
joinPartsIntoPrefix(splitPrefixIntoParts(prefix));


module.exports = {
splitPrefixIntoParts,
joinPartsIntoPrefix,
handle500Error,
unauthorized,
parsePrefix
parsePrefix,
canonicalizePrefix,
};
64 changes: 26 additions & 38 deletions src/sources.js
Original file line number Diff line number Diff line change
Expand Up @@ -38,9 +38,9 @@ class Source {
return new Narrative(this, pathParts);
}

// The computation of these globals should move here.
// eslint-disable-next-line no-unused-vars
secondTreeOptions(path) {
return (global.availableDatasets.secondTreeOptions[this.name] || {})[path] || [];
return [];
}

availableDatasets() {
Expand Down Expand Up @@ -128,6 +128,10 @@ class CoreSource extends Source {
}

// The computation of these globals should move here.
secondTreeOptions(path) {
return (global.availableDatasets.secondTreeOptions[this.name] || {})[path] || [];
}

availableDatasets() {
return global.availableDatasets[this.name] || [];
}
Expand Down Expand Up @@ -296,12 +300,19 @@ class CommunityNarrative extends Narrative {


class UrlDefinedSource extends Source {
static get _name() { return "fetch"; }

static get _name() { return "urlDefined"; }
get baseUrl() {
throw new Error("UrlDefinedSource does not use `this.baseUrl`");
constructor(authority) {
super();

if (!authority) throw new Error(`Cannot construct a ${this.constructor.name} without a URL authority`);

this.authority = authority;
}

get baseUrl() {
return `https://${this.authority}`;
}
dataset(pathParts) {
return new UrlDefinedDataset(this, pathParts);
}
Expand All @@ -316,49 +327,26 @@ class UrlDefinedSource extends Source {
}

class UrlDefinedDataset extends Dataset {
get baseParts() {
return this.pathParts;
}
get isRequestValidWithoutDataset() {
return false;
}
baseNameFor(type) {
// mandate https
const datasetUrl = "https://" + this.baseParts.join("/");
if (type==="main") {
return datasetUrl;
}
// if the request is for A.json, then return A_<type>.json.
if (datasetUrl.endsWith(".json")) {
return `${datasetUrl.replace(/\.json$/, '')}_${type}.json`;
const baseName = this.baseParts.join("/");

if (type === "main") {
return baseName;
}
// if the request is for B, where B doesn't end with `.json`, then return B_<type>
return `${datasetUrl}_${type}`;
}
urlFor(type) {
// when `parsePrefix()` runs (which it does for each /charon/getDataset API request), it preemptively defines
// a `urlFor` tree, meta and main types. For `UrlDefinedDataset`s we can only serve v2 datasets, but be aware
// the `urlFor` function is still called for tree + meta "types".
if (type==="tree" || type==="meta") return undefined;
const url = new URL(this.baseNameFor(type));
return url.toString();

return baseName.endsWith(".json")
? `${baseName.replace(/\.json$/, '')}_${type}.json`
: `${baseName}_${type}`;
}
}

class UrlDefinedNarrative extends Narrative {
get baseParts() {
return this.pathParts;
}
get baseName() {
// mandate https
return "https://" + this.baseParts.join("/");
}
url() {
const url = new URL(this.baseName);
return url.toString();
return this.baseParts.join("/");
}
}


class S3Source extends Source {
get bucket() {
throw new InvalidSourceImplementation("bucket() must be implemented by subclasses");
Expand Down
2 changes: 1 addition & 1 deletion test/smoke-test/auspice_client_requests.json
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
"url": "/charon/getDataset?prefix=/flu",
"expectStatusCode": 302,
"responseIsJson": false,
"redirectsTo": "/charon/getDataset?prefix=/flu/seasonal/h3n2/ha/2y"
"redirectsTo": "/charon/getDataset?prefix=flu%2Fseasonal%2Fh3n2%2Fha%2F2y"
},
{
"name": "Check that the main getAvailable API call returns a JSON",
Expand Down

0 comments on commit d733806

Please sign in to comment.