1
0
mirror of https://gitlab.com/timvisee/send.git synced 2024-11-10 13:13:00 +01:00

Merge pull request #766 from mozilla/frontend-tests

Some frontend unit tests [WIP]
This commit is contained in:
Danny Coates 2018-02-25 16:53:02 -08:00 committed by GitHub
commit e79bacd268
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
26 changed files with 3639 additions and 892 deletions

View File

@ -6,4 +6,5 @@ assets
docs
public
test
coverage
coverage
.nyc_output

3
.nsprc
View File

@ -1,3 +0,0 @@
{
"exceptions": ["https://nodesecurity.io/advisories/534"]
}

View File

@ -1,2 +1,3 @@
dist
assets/*.js
coverage

View File

@ -91,10 +91,15 @@ export async function setPassword(id, owner_token, keychain) {
return response.ok;
}
export function uploadFile(encrypted, metadata, verifierB64, keychain) {
export function uploadFile(
encrypted,
metadata,
verifierB64,
keychain,
onprogress
) {
const xhr = new XMLHttpRequest();
const upload = {
onprogress: function() {},
cancel: function() {
xhr.abort();
},
@ -122,7 +127,7 @@ export function uploadFile(encrypted, metadata, verifierB64, keychain) {
fd.append('data', blob);
xhr.upload.addEventListener('progress', function(event) {
if (event.lengthComputable) {
upload.onprogress([event.loaded, event.total]);
onprogress([event.loaded, event.total]);
}
});
xhr.open('post', '/api/upload', true);
@ -132,82 +137,63 @@ export function uploadFile(encrypted, metadata, verifierB64, keychain) {
return upload;
}
function download(id, keychain) {
function download(id, keychain, onprogress, canceller) {
const xhr = new XMLHttpRequest();
const download = {
onprogress: function() {},
cancel: function() {
xhr.abort();
},
result: new Promise(async function(resolve, reject) {
xhr.addEventListener('loadend', function() {
const authHeader = xhr.getResponseHeader('WWW-Authenticate');
if (authHeader) {
keychain.nonce = parseNonce(authHeader);
}
if (xhr.status === 404) {
return reject(new Error('notfound'));
}
if (xhr.status !== 200) {
return reject(new Error(xhr.status));
}
const blob = new Blob([xhr.response]);
const fileReader = new FileReader();
fileReader.readAsArrayBuffer(blob);
fileReader.onload = function() {
resolve(this.result);
};
});
xhr.addEventListener('progress', function(event) {
if (event.lengthComputable && event.target.status === 200) {
download.onprogress([event.loaded, event.total]);
}
});
const auth = await keychain.authHeader();
xhr.open('get', `/api/download/${id}`);
xhr.setRequestHeader('Authorization', auth);
xhr.responseType = 'blob';
xhr.send();
})
canceller.oncancel = function() {
xhr.abort();
};
return new Promise(async function(resolve, reject) {
xhr.addEventListener('loadend', function() {
canceller.oncancel = function() {};
const authHeader = xhr.getResponseHeader('WWW-Authenticate');
if (authHeader) {
keychain.nonce = parseNonce(authHeader);
}
if (xhr.status !== 200) {
return reject(new Error(xhr.status));
}
return download;
const blob = new Blob([xhr.response]);
const fileReader = new FileReader();
fileReader.readAsArrayBuffer(blob);
fileReader.onload = function() {
resolve(this.result);
};
});
xhr.addEventListener('progress', function(event) {
if (event.lengthComputable && event.target.status === 200) {
onprogress([event.loaded, event.total]);
}
});
const auth = await keychain.authHeader();
xhr.open('get', `/api/download/${id}`);
xhr.setRequestHeader('Authorization', auth);
xhr.responseType = 'blob';
xhr.send();
});
}
async function tryDownload(id, keychain, onprogress, tries = 1) {
const dl = download(id, keychain);
dl.onprogress = onprogress;
async function tryDownload(id, keychain, onprogress, canceller, tries = 1) {
try {
const result = await dl.result;
const result = await download(id, keychain, onprogress, canceller);
return result;
} catch (e) {
if (e.message === '401' && --tries > 0) {
return tryDownload(id, keychain, onprogress, tries);
return tryDownload(id, keychain, onprogress, canceller, tries);
}
throw e;
}
}
export function downloadFile(id, keychain) {
let cancelled = false;
function updateProgress(p) {
if (cancelled) {
// This is a bit of a hack
// We piggyback off of the progress event as a chance to cancel.
// Otherwise wiring the xhr abort up while allowing retries
// gets pretty nasty.
// 'this' here is the object returned by download(id, keychain)
return this.cancel();
}
dl.onprogress(p);
}
const dl = {
onprogress: function() {},
cancel: function() {
cancelled = true;
},
result: tryDownload(id, keychain, updateProgress, 2)
// Starts a download with one automatic retry on 401 (stale auth nonce).
// Returns { cancel, result }: `cancel()` aborts the in-flight request,
// `result` is a Promise resolving to the downloaded data.
export function downloadFile(id, keychain, onprogress) {
  // Shared cancellation token: download() assigns its xhr.abort onto
  // `oncancel`, and clears it on 'loadend', so cancel() always targets
  // the current attempt even across retries.
  const canceller = {
    oncancel: function() {} // download() sets this
  };
  function cancel() {
    canceller.oncancel();
  }
  return {
    cancel,
    result: tryDownload(id, keychain, onprogress, canceller, 2)
  };
  // Fixed: removed unreachable `return dl;` (diff residue) — it followed
  // the return above and referenced an undefined variable `dl`.
}

View File

@ -149,8 +149,6 @@ export default function(state, emitter) {
const receiver = new FileReceiver(file);
try {
await receiver.getMetadata();
receiver.on('progress', updateProgress);
receiver.on('decrypting', render);
state.transfer = receiver;
} catch (e) {
if (e.message === '401') {
@ -164,14 +162,16 @@ export default function(state, emitter) {
});
emitter.on('download', async file => {
state.transfer.on('progress', render);
state.transfer.on('progress', updateProgress);
state.transfer.on('decrypting', render);
const links = openLinksInNewTab();
const size = file.size;
try {
const start = Date.now();
metrics.startedDownload({ size: file.size, ttl: file.ttl });
await state.transfer.download();
const dl = state.transfer.download();
render();
await dl;
const time = Date.now() - start;
const speed = size / (time / 1000);
await delay(1000);
@ -188,7 +188,7 @@ export default function(state, emitter) {
}
console.error(err);
state.transfer = null;
const location = err.message === 'notfound' ? '/404' : '/error';
const location = err.message === '404' ? '/404' : '/error';
if (location === '/error') {
state.raven.captureException(err);
metrics.stoppedDownload({ size, err });

View File

@ -30,64 +30,55 @@ export default class FileReceiver extends Nanobus {
}
cancel() {
this.cancelled = true;
if (this.fileDownload) {
this.fileDownload.cancel();
if (this.downloadRequest) {
this.downloadRequest.cancel();
}
}
reset() {
this.fileDownload = null;
this.msg = 'fileSizeProgress';
this.state = 'initialized';
this.progress = [0, 1];
this.cancelled = false;
}
async getMetadata() {
const meta = await metadata(this.fileInfo.id, this.keychain);
if (meta) {
this.keychain.setIV(meta.iv);
this.fileInfo.name = meta.name;
this.fileInfo.type = meta.type;
this.fileInfo.iv = meta.iv;
this.fileInfo.size = meta.size;
this.state = 'ready';
return;
}
this.state = 'invalid';
return;
this.keychain.setIV(meta.iv);
this.fileInfo.name = meta.name;
this.fileInfo.type = meta.type;
this.fileInfo.iv = meta.iv;
this.fileInfo.size = meta.size;
this.state = 'ready';
}
async download() {
async download(noSave = false) {
this.state = 'downloading';
this.emit('progress', this.progress);
try {
const download = await downloadFile(this.fileInfo.id, this.keychain);
download.onprogress = p => {
this.downloadRequest = await downloadFile(
this.fileInfo.id,
this.keychain,
p => {
this.progress = p;
this.emit('progress', p);
};
this.fileDownload = download;
const ciphertext = await download.result;
this.fileDownload = null;
this.emit('progress');
}
);
try {
const ciphertext = await this.downloadRequest.result;
this.downloadRequest = null;
this.msg = 'decryptingFile';
this.state = 'decrypting';
this.emit('decrypting');
const plaintext = await this.keychain.decryptFile(ciphertext);
if (this.cancelled) {
throw new Error(0);
if (!noSave) {
await saveFile({
plaintext,
name: decodeURIComponent(this.fileInfo.name),
type: this.fileInfo.type
});
}
await saveFile({
plaintext,
name: decodeURIComponent(this.fileInfo.name),
type: this.fileInfo.type
});
this.msg = 'downloadFinish';
this.state = 'complete';
return;
} catch (e) {
this.state = 'invalid';
this.downloadRequest = null;
throw e;
}
}

View File

@ -9,11 +9,8 @@ export default class FileSender extends Nanobus {
constructor(file) {
super('FileSender');
this.file = file;
this.uploadRequest = null;
this.msg = 'importingFile';
this.progress = [0, 1];
this.cancelled = false;
this.keychain = new Keychain();
this.reset();
}
get progressRatio() {
@ -31,6 +28,13 @@ export default class FileSender extends Nanobus {
};
}
reset() {
this.uploadRequest = null;
this.msg = 'importingFile';
this.progress = [0, 1];
this.cancelled = false;
}
cancel() {
this.cancelled = true;
if (this.uploadRequest) {
@ -71,13 +75,13 @@ export default class FileSender extends Nanobus {
encrypted,
metadata,
authKeyB64,
this.keychain
this.keychain,
p => {
this.progress = p;
this.emit('progress', p);
}
);
this.msg = 'fileSizeProgress';
this.uploadRequest.onprogress = p => {
this.progress = p;
this.emit('progress', p);
};
try {
const result = await this.uploadRequest.result;
const time = Date.now() - start;

View File

@ -16,7 +16,7 @@ export default class OwnedFile {
this.ownerToken = obj.ownerToken;
this.dlimit = obj.dlimit || 1;
this.dtotal = obj.dtotal || 0;
this.keychain = new Keychain(obj.secretKey);
this.keychain = new Keychain(obj.secretKey, obj.nonce);
this._hasPassword = !!obj.hasPassword;
}
@ -59,6 +59,7 @@ export default class OwnedFile {
if (e.message === '404') {
this.dtotal = this.dlimit;
}
// ignore other errors
}
}

View File

@ -1,28 +1,97 @@
machine:
node:
version: 8
services:
- docker
- redis
deployment:
latest:
branch: master
commands:
- docker login -e $DOCKER_EMAIL -u $DOCKER_USER -p $DOCKER_PASS
- docker build -t mozilla/send:latest .
- docker push mozilla/send:latest
tags:
tag: /.*/
owner: mozilla
commands:
- docker login -e $DOCKER_EMAIL -u $DOCKER_USER -p $DOCKER_PASS
- docker build -t mozilla/send:$CIRCLE_TAG .
- docker push mozilla/send:$CIRCLE_TAG
test:
override:
- npm run build
- npm run lint
- npm run test:ci
- nsp check
version: 2.0
jobs:
  build:
    docker:
      - image: circleci/node:8
    steps:
      - checkout
      - restore_cache:
          key: send-{{ checksum "package-lock.json" }}
      - run: npm install
      - save_cache:
          key: send-{{ checksum "package-lock.json" }}
          paths:
            - node_modules
      - run: npm run build
      # Hand the built tree to the deploy jobs.
      - persist_to_workspace:
          root: .
          paths:
            - ./*
  test:
    docker:
      # -browsers variant: headless Chrome for the puppeteer frontend tests
      - image: circleci/node:8-browsers
    steps:
      - checkout
      - restore_cache:
          key: send-{{ checksum "package-lock.json" }}
      - run: npm install
      - save_cache:
          key: send-{{ checksum "package-lock.json" }}
          paths:
            - node_modules
      - run: npm run check
      - run: npm run lint
      - run: npm test
  deploy_dev:
    machine: true
    steps:
      - attach_workspace:
          at: .
      - run: docker login -e $DOCKER_EMAIL -u $DOCKER_USER -p $DOCKER_PASS
      - run: docker build -t mozilla/send:latest .
      - run: docker push mozilla/send:latest
  deploy_stage:
    machine: true
    steps:
      - attach_workspace:
          at: .
      - run: docker login -e $DOCKER_EMAIL -u $DOCKER_USER -p $DOCKER_PASS
      - run: docker build -t mozilla/send:$CIRCLE_TAG .
      - run: docker push mozilla/send:$CIRCLE_TAG
workflows:
  version: 2
  test_pr:
    jobs:
      - test:
          filters:
            branches:
              ignore: master
  build_and_deploy_dev:
    jobs:
      - build:
          filters:
            branches:
              only: master
            tags:
              ignore: /^v.*/
      # Fixed: was `deploy_latest`, which matches no job definition and
      # would fail CircleCI config validation; the job is named deploy_dev.
      - deploy_dev:
          requires:
            - build
          filters:
            branches:
              only: master
            tags:
              ignore: /^v.*/
  build_and_deploy_stage:
    jobs:
      - build:
          filters:
            branches:
              only: master
            tags:
              only: /^v.*/
      - test:
          filters:
            branches:
              only: master
            tags:
              only: /^v.*/
      - deploy_stage:
          requires:
            - build
            - test
          filters:
            branches:
              only: master
            tags:
              only: /^v.*/

3410
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -9,6 +9,8 @@
"private": true,
"scripts": {
"precommit": "lint-staged",
"prepush": "npm test",
"check": "nsp check",
"clean": "rimraf dist",
"build": "npm run clean && webpack -p",
"lint": "npm-run-all lint:*",
@ -23,8 +25,9 @@
"changelog": "github-changes -o mozilla -r send --only-pulls --use-commit-body --no-merges",
"contributors": "git shortlog -s | awk -F\\t '{print $2}' > CONTRIBUTORS",
"release": "npm-run-all contributors changelog",
"test": "mocha test/unit",
"test:ci": "nyc mocha --reporter=min test/unit",
"test": "npm-run-all test:*",
"test:backend": "nyc mocha --reporter=min test/unit",
"test:frontend": "cross-env NODE_ENV=development node test/frontend/runner.js && nyc report --reporter=html",
"start": "cross-env NODE_ENV=development webpack-dev-server",
"prod": "node server/prod.js",
"cover": "nyc --reporter=html mocha test/unit"
@ -43,7 +46,7 @@
},
"nyc": {
"reporter": [
"text-summary"
"text"
],
"cache": true
},
@ -52,31 +55,32 @@
},
"devDependencies": {
"babel-core": "^6.26.0",
"babel-loader": "^7.1.2",
"babel-loader": "^7.1.3",
"babel-plugin-istanbul": "^4.1.5",
"babel-plugin-yo-yoify": "^1.0.2",
"babel-preset-env": "^1.6.1",
"babel-preset-es2015": "^6.24.1",
"babel-preset-stage-2": "^6.24.1",
"babel-preset-stage-3": "^6.24.1",
"base64-js": "^1.2.3",
"copy-webpack-plugin": "^4.4.1",
"copy-webpack-plugin": "^4.4.2",
"cross-env": "^5.1.3",
"css-loader": "^0.28.9",
"css-loader": "^0.28.10",
"css-mqpacker": "^6.0.2",
"eslint": "^4.18.0",
"eslint": "^4.18.1",
"eslint-plugin-mocha": "^4.11.0",
"eslint-plugin-node": "^6.0.0",
"eslint-plugin-node": "^6.0.1",
"eslint-plugin-security": "^1.4.0",
"expose-loader": "^0.7.4",
"extract-loader": "^1.0.2",
"extract-text-webpack-plugin": "^3.0.2",
"file-loader": "^1.1.7",
"file-loader": "^1.1.9",
"fluent-intl-polyfill": "^0.1.0",
"git-rev-sync": "^1.10.0",
"github-changes": "^1.1.2",
"html-loader": "^0.5.5",
"husky": "^0.14.3",
"lint-staged": "^6.1.1",
"lint-staged": "^7.0.0",
"mocha": "^5.0.0",
"nanobus": "^4.3.2",
"nanotiming": "^7.3.0",
@ -88,28 +92,27 @@
"postcss-loader": "^2.1.0",
"prettier": "^1.10.2",
"proxyquire": "^1.8.0",
"puppeteer": "^1.1.1",
"raven-js": "^3.22.2",
"redis-mock": "^0.21.0",
"require-from-string": "^2.0.1",
"rimraf": "^2.6.2",
"selenium-webdriver": "^3.6.0",
"sinon": "^4.3.0",
"sinon": "^4.4.2",
"string-hash": "^1.1.3",
"stylelint": "^9.0.0",
"stylelint": "^9.1.1",
"stylelint-config-standard": "^18.1.0",
"stylelint-no-unsupported-browser-features": "^2.0.0",
"supertest": "^3.0.0",
"testpilot-ga": "^0.3.0",
"val-loader": "^1.1.0",
"webpack": "^3.11.0",
"webpack-dev-middleware": "^2.0.6",
"webpack-dev-server": "2.9.1",
"webpack-manifest-plugin": "^1.3.2",
"webpack-unassert-loader": "^1.2.0"
},
"dependencies": {
"aws-sdk": "^2.198.0",
"aws-sdk": "^2.202.0",
"babel-polyfill": "^6.26.0",
"body-parser": "^1.18.2",
"choo": "^6.7.0",
"cldr-core": "^32.0.0",
"connect-busboy": "0.0.2",

View File

@ -2,11 +2,13 @@ const assets = require('../common/assets');
const locales = require('../common/locales');
const routes = require('./routes');
const pages = require('./routes/pages');
const tests = require('../test/frontend/routes');
module.exports = function(app, devServer) {
assets.setMiddleware(devServer.middleware);
locales.setMiddleware(devServer.middleware);
routes(app);
tests(app);
// webpack-dev-server routes haven't been added yet
// so wait for next tick to add 404 handler
process.nextTick(() => app.use(pages.notfound));

View File

@ -1,6 +1,6 @@
const express = require('express');
const busboy = require('connect-busboy');
const helmet = require('helmet');
const bodyParser = require('body-parser');
const storage = require('../storage');
const config = require('../config');
const auth = require('../middleware/auth');
@ -55,7 +55,7 @@ module.exports = function(app) {
res.set('Cache-Control', 'no-cache');
next();
});
app.use(bodyParser.json());
app.use(express.json());
app.get('/', language, pages.index);
app.get('/legal', language, pages.legal);
app.get('/jsconfig.js', require('./jsconfig'));

View File

@ -1,2 +1,8 @@
env:
browser: true
browser: true
parserOptions:
sourceType: module
rules:
node/no-unsupported-features: off

View File

@ -1,22 +0,0 @@
// Selenium harness: opens frontend.test.html in Firefox and reads the
// mocha failure count from the page <title>, which the test page sets
// when the run completes.
const webdriver = require('selenium-webdriver');
const path = require('path');
const until = webdriver.until;

const driver = new webdriver.Builder().forBrowser('firefox').build();

driver.get(path.join('file:///', __dirname, '/frontend.test.html'));
// First wait for mocha to load ('Mocha Tests'), then for the run to
// finish — the title becomes a single digit: the failure count.
driver.wait(until.titleIs('Mocha Tests'));
driver.wait(until.titleMatches(/^[0-9]$/));
driver.getTitle().then(title => {
  driver.quit().then(() => {
    if (title === '0') {
      console.log('Frontend tests have passed.');
    } else {
      // Non-zero failures: throw so the CI process exits non-zero.
      throw new Error(
        'Frontend tests are failing. ' +
        'Please open the frontend.test.html file in a browser.'
      );
    }
  });
});

View File

@ -1,22 +0,0 @@
// Bootstrap bundle for the in-browser test harness (frontend.test.html).
// Exposes app modules and test doubles on `window` so frontend.test.js
// can reference them as globals.

// Minimal stand-in for a DOM File: a Blob carrying a `name` property.
class FakeFile extends Blob {
  constructor(name, data, opt) {
    super(data, opt);
    this.name = name;
  }
}

// Stub Raven (error reporting) so app exceptions just log to the console.
window.Raven = {
  captureException: function(err) {
    console.error(err, err.stack);
  }
};

window.FakeFile = FakeFile;
window.FileSender = require('../../app/fileSender');
window.FileReceiver = require('../../app/fileReceiver');
window.sinon = require('sinon');
// Sinon fake server: intercepts XHRs issued by FileSender/FileReceiver.
window.server = window.sinon.fakeServer.create();
window.assert = require('assert');
const utils = require('../../app/utils');
window.b64ToArray = utils.b64ToArray;
window.arrayToB64 = utils.arrayToB64;

View File

@ -1,24 +0,0 @@
<!DOCTYPE html>
<!-- In-browser mocha test page, driven by frontend.test.driver.js -->
<html>
<head>
<title>Mocha Tests</title>
<link rel="stylesheet" href="../../node_modules/mocha/mocha.css">
<!-- bundle.js puts app modules, sinon, and the fake server on window -->
<script src="bundle.js"></script>
<meta charset="utf-8"/>
</head>
<body>
<div id="mocha"></div>
<script src="../../node_modules/mocha/mocha.js"></script>
<script>mocha.setup('bdd')</script>
<script src="frontend.test.js"></script>
<script>
mocha.checkLeaks();
mocha.globals(['jQuery']);
// Write the failure count into document.title so the selenium driver
// can read the result without page instrumentation.
mocha.run(function(err) {
document.title = err;
});
</script>
</body>
</html>

View File

@ -1,230 +0,0 @@
// Legacy in-browser unit tests, loaded by frontend.test.html.
// All dependencies come from globals set up by frontend.bundle.js.
const FileSender = window.FileSender;
const FileReceiver = window.FileReceiver;
const FakeFile = window.FakeFile;
const assert = window.assert;
const server = window.server;
const b64ToArray = window.b64ToArray;
const sinon = window.sinon;

// Shared mutable state: the sender tests populate these, and the
// receiver tests depend on them (order-dependent — see `cb` below).
let file;
let encryptedIV;
let secretKey;
let originalBlob;

describe('File Sender', function() {
  before(function() {
    server.respondImmediately = true;
    // Fake the upload endpoint: capture the encrypted payload into `file`
    // and echo back the id/delete token from the metadata header.
    server.respondWith('POST', '/upload', function(request) {
      const reader = new FileReader();
      reader.readAsArrayBuffer(request.requestBody.get('data'));
      reader.onload = function(event) {
        file = this.result;
      };
      const responseObj = JSON.parse(request.requestHeaders['X-File-Metadata']);
      request.respond(
        200,
        { 'Content-Type': 'application/json' },
        JSON.stringify({
          url: 'some url',
          id: responseObj.id,
          delete: responseObj.delete
        })
      );
    });
  });

  it('Should get a loading event emission', function() {
    const file = new FakeFile('hello_world.txt', ['This is some data.']);
    const fs = new FileSender(file);
    let testLoading = true;

    // 'loading' may repeat while true, but must never flip back to true
    // after having gone false.
    fs.on('loading', isStillLoading => {
      assert(!(!testLoading && isStillLoading));
      testLoading = isStillLoading;
    });

    return fs
      .upload()
      .then(info => {
        assert(info);
        assert(!testLoading);
      })
      .catch(err => {
        console.log(err, err.stack);
        assert.fail();
      });
  });

  it('Should get a encrypting event emission', function() {
    const file = new FakeFile('hello_world.txt', ['This is some data.']);
    const fs = new FileSender(file);
    let testEncrypting = true;

    // Same monotonic true->false expectation as the loading test.
    fs.on('encrypting', isStillEncrypting => {
      assert(!(!testEncrypting && isStillEncrypting));
      testEncrypting = isStillEncrypting;
    });

    return fs
      .upload()
      .then(info => {
        assert(info);
        assert(!testEncrypting);
      })
      .catch(err => {
        console.log(err, err.stack);
        assert.fail();
      });
  });

  it('Should encrypt a file properly', function(done) {
    const newFile = new FakeFile('hello_world.txt', ['This is some data.']);
    const fs = new FileSender(newFile);
    fs.upload().then(info => {
      // Save key/IV for the receiver tests below.
      const key = info.secretKey;
      secretKey = info.secretKey;
      const IV = info.fileId;
      encryptedIV = info.fileId;
      const readRaw = new FileReader();
      readRaw.onload = function(event) {
        const rawArray = new Uint8Array(this.result);
        originalBlob = rawArray;
        // Re-encrypt the raw bytes with WebCrypto directly; the result
        // must match the payload captured by the fake upload endpoint.
        window.crypto.subtle
          .importKey(
            'jwk',
            {
              kty: 'oct',
              k: key,
              alg: 'A128GCM',
              ext: true
            },
            {
              name: 'AES-GCM'
            },
            true,
            ['encrypt', 'decrypt']
          )
          .then(cryptoKey => {
            window.crypto.subtle
              .encrypt(
                {
                  name: 'AES-GCM',
                  iv: b64ToArray(IV),
                  tagLength: 128
                },
                cryptoKey,
                rawArray
              )
              .then(encrypted => {
                assert(
                  new Uint8Array(encrypted).toString() ===
                  new Uint8Array(file).toString()
                );
                done();
              });
          });
      };
      readRaw.readAsArrayBuffer(newFile);
    });
  });
});

describe('File Receiver', function() {
  // Fake XHR that immediately "downloads" the ciphertext captured by the
  // sender tests, with metadata in a JSON response header.
  class FakeXHR {
    constructor() {
      this.response = file;
      this.status = 200;
    }

    static setup() {
      FakeXHR.prototype.open = sinon.spy();
      FakeXHR.prototype.send = function() {
        this.onload();
      };
      FakeXHR.prototype.originalXHR = window.XMLHttpRequest;
      FakeXHR.prototype.getResponseHeader = function() {
        return JSON.stringify({
          filename: 'hello_world.txt',
          id: encryptedIV
        });
      };
      window.XMLHttpRequest = FakeXHR;
    }

    static restore() {
      // originalXHR is a sinon FakeXMLHttpRequest, since
      // fakeServer.create() is called in frontend.bundle.js
      window.XMLHttpRequest.prototype.originalXHR.restore();
    }
  }

  // Guard: these tests cannot run unless the sender tests populated the
  // shared state above.
  const cb = function(done) {
    if (
      file === undefined ||
      encryptedIV === undefined ||
      secretKey === undefined
    ) {
      assert.fail(
        'Please run file sending tests before trying to receive the files.'
      );
      done();
    }
    FakeXHR.setup();
    done();
  };

  before(cb);
  after(function() {
    FakeXHR.restore();
  });

  it('Should decrypt properly', function() {
    const fr = new FileReceiver();
    // The receiver reads the secret key from the URL fragment.
    location.hash = secretKey;
    return fr
      .download()
      .then(([decrypted, name]) => {
        assert(name);
        assert(
          new Uint8Array(decrypted).toString() ===
          new Uint8Array(originalBlob).toString()
        );
      })
      .catch(err => {
        console.log(err, err.stack);
        assert.fail();
      });
  });

  it('Should emit decrypting events', function() {
    const fr = new FileReceiver();
    location.hash = secretKey;
    let testDecrypting = true;

    // 'decrypting' must never flip back to true after going false.
    fr.on('decrypting', isStillDecrypting => {
      assert(!(!testDecrypting && isStillDecrypting));
      testDecrypting = isStillDecrypting;
    });

    return fr
      .download()
      .then(([decrypted, name]) => {
        assert(decrypted);
        assert(name);
        assert(!testDecrypting);
      })
      .catch(err => {
        console.log(err, err.stack);
        assert.fail();
      });
  });
});

16
test/frontend/index.js Normal file
View File

@ -0,0 +1,16 @@
const fs = require('fs');
const path = require('path');
function kv(f) {
return `require('./tests/${f}')`;
}
module.exports = function() {
const files = fs.readdirSync(path.join(__dirname, 'tests'));
const code = files.map(kv).join(';\n');
return {
code,
dependencies: files.map(f => require.resolve('./tests/' + f)),
cacheable: false
};
};

47
test/frontend/routes.js Normal file
View File

@ -0,0 +1,47 @@
const html = require('choo/html');
const assets = require('../../common/assets');

// Dev-only express routes serving the in-browser mocha test page at /test.
module.exports = function(app) {
  // Serve mocha's own assets straight out of node_modules.
  app.get('/mocha.css', function(req, res) {
    res.sendFile(require.resolve('mocha/mocha.css'));
  });
  app.get('/mocha.js', function(req, res) {
    res.sendFile(require.resolve('mocha/mocha.js'));
  });
  app.get('/test', function(req, res) {
    // The inline Combo reporter runs both the HTML reporter (for humans)
    // and the JSON reporter (so the puppeteer runner can read
    // `runner.testResults` — see test/frontend/runner.js).
    res.send(
      html`
<!DOCTYPE html>
<html>
<head>
<link rel="stylesheet" type="text/css" href="/mocha.css" />
<script src="/mocha.js"></script>
<script>
const reporters = mocha.constructor.reporters;
function Combo(runner) {
reporters.HTML.call(this, runner)
reporters.JSON.call(this, runner)
}
Object.setPrototypeOf(Combo.prototype, reporters.HTML.prototype)
mocha.setup({
ui: 'bdd',
reporter: Combo
})
</script>
<script src="/jsconfig.js"></script>
<script src="${assets.get('runtime.js')}"></script>
<script src="${assets.get('vendor.js')}"></script>
<script src="${assets.get('tests.js')}"></script>
</head>
<body>
<div id="mocha"></div>
<script>
mocha.checkLeaks();
const runner = mocha.run();
</script>
</body>
</html>
`.toString()
    );
  });
};

63
test/frontend/runner.js Normal file
View File

@ -0,0 +1,63 @@
/* eslint-disable no-undef, no-process-exit */
// Headless frontend test runner: builds the app with webpack, serves the
// /test page through the dev routes, loads it in puppeteer, writes the
// coverage data, and exits with the mocha failure count as exit code.
const fs = require('fs');
const path = require('path');
const mkdirp = require('mkdirp');
const puppeteer = require('puppeteer');
const webpack = require('webpack');
const config = require('../../webpack.config');
const middleware = require('webpack-dev-middleware');
const express = require('express');
const devRoutes = require('../../server/dev');

const app = express();
const wpm = middleware(webpack(config), { logLevel: 'silent' });
app.use(wpm);
devRoutes(app, { middleware: wpm });

function onConsole(msg) {
  // excluding 'log' because mocha uses it to write the json output
  if (msg.type() !== 'log') {
    console.error(msg.text());
  }
}

const server = app.listen(async function() {
  let exitCode = -1; // any failure before results are read exits as -1
  const browser = await puppeteer.launch();
  try {
    const page = await browser.newPage();
    page.on('console', onConsole);
    page.on('pageerror', console.log.bind(console));
    await page.goto(`http://127.0.0.1:${server.address().port}/test`);
    // `runner` is the mocha runner created by the /test page; the JSON
    // reporter sets `testResults` when the run finishes.
    await page.waitFor(() => typeof runner.testResults !== 'undefined', {
      timeout: 5000
    });
    const results = await page.evaluate(() => runner.testResults);
    // __coverage__ is the istanbul coverage object collected in-page.
    const coverage = await page.evaluate(() => __coverage__);
    if (coverage) {
      const dir = path.resolve(__dirname, '../../.nyc_output');
      mkdirp.sync(dir);
      // Written here so `nyc report` can merge it after the run.
      fs.writeFileSync(
        path.resolve(dir, 'frontend.json'),
        JSON.stringify(coverage)
      );
    }
    const stats = results.stats;
    exitCode = stats.failures;
    console.log(`${stats.passes} passing (${stats.duration}ms)\n`);
    if (stats.failures) {
      console.log('Failures:\n');
      for (const f of results.failures) {
        console.log(`${f.fullTitle}`);
        console.log(` ${f.err.stack}\n`);
      }
    }
  } catch (e) {
    console.log(e);
  } finally {
    // Always tear down the browser and server, then propagate the count.
    browser.close();
    server.close(() => {
      process.exit(exitCode);
    });
  }
});

View File

@ -0,0 +1,43 @@
import assert from 'assert';
import * as api from '../../../app/api';
import Keychain from '../../../app/keychain';

// Shared fixture: a tiny payload and its metadata.
const plaintext = new TextEncoder().encode('hello world!');
const metadata = {
  name: 'test.txt',
  type: 'text/plain'
};

// Encrypts the fixture with a fresh keychain and kicks off an upload,
// returning the upload handle ({ cancel, result }).
async function startUpload() {
  const keychain = new Keychain();
  const encrypted = await keychain.encryptFile(plaintext);
  const meta = await keychain.encryptMetadata(metadata);
  const verifierB64 = await keychain.authKeyB64();
  const noop = function() {};
  return api.uploadFile(encrypted, meta, verifierB64, keychain, noop);
}

describe('API', function() {
  describe('uploadFile', function() {
    it('returns file info on success', async function() {
      const up = await startUpload();
      const result = await up.result;
      assert.ok(result.url);
      assert.ok(result.id);
      assert.ok(result.ownerToken);
    });

    it('can be cancelled', async function() {
      const up = await startUpload();
      up.cancel();
      try {
        await up.result;
        assert.fail('not cancelled');
      } catch (e) {
        assert.equal(e.message, '0');
      }
    });
  });
});

View File

@ -0,0 +1,17 @@
import assert from 'assert';
import FileSender from '../../../app/fileSender';

// FileSender normally receives a DOM File; a Blob with a `name`
// property is an adequate stand-in for tests.
const testBlob = new Blob(['hello world!'], { type: 'text/plain' });
testBlob.name = 'text.txt';

describe('FileSender', function() {
  describe('upload', function() {
    it('returns an OwnedFile on success', async function() {
      const sender = new FileSender(testBlob);
      const ownedFile = await sender.upload();
      assert.ok(ownedFile.id);
      assert.equal(ownedFile.name, testBlob.name);
    });
  });
});

View File

@ -0,0 +1,41 @@
import assert from 'assert';
import Keychain from '../../../app/keychain';

describe('Keychain', function() {
  describe('setPassword', function() {
    it('changes the authKey', async function() {
      const keychain = new Keychain();
      const keyBefore = await keychain.authKeyB64();
      keychain.setPassword('foo', 'some://url');
      const keyAfter = await keychain.authKeyB64();
      assert.notEqual(keyAfter, keyBefore);
    });
  });

  describe('encrypt / decrypt file', function() {
    it('can decrypt text it encrypts', async function() {
      const encode = new TextEncoder();
      const decode = new TextDecoder();
      const original = 'hello world!';
      const keychain = new Keychain();
      // The ciphertext must not read back as the plaintext...
      const ciphertext = await keychain.encryptFile(encode.encode(original));
      assert.notEqual(decode.decode(ciphertext), original);
      // ...and decrypting must round-trip back to the original text.
      const plaintext = await keychain.decryptFile(ciphertext);
      assert.equal(decode.decode(plaintext), original);
    });
  });

  describe('encrypt / decrypt metadata', function() {
    it('can decrypt metadata it encrypts', async function() {
      const keychain = new Keychain();
      const meta = { name: 'foo', type: 'bar/baz' };
      const roundTripped = await keychain.decryptMetadata(
        await keychain.encryptMetadata(meta)
      );
      assert.equal(roundTripped.name, meta.name);
      assert.equal(roundTripped.type, meta.type);
    });
  });
});

View File

@ -0,0 +1,180 @@
import assert from 'assert';
import FileSender from '../../../app/fileSender';
import FileReceiver from '../../../app/fileReceiver';

// Full upload/download integration tests against the dev-server API.
// NOTE(review): `noSave` is passed to download(noSave) to skip saveFile;
// it is true in a *headed* browser — presumably saving to disk is only
// exercised under headless puppeteer. Confirm the intended polarity.
const headless = /Headless/.test(navigator.userAgent);
const noSave = !headless; // only run the saveFile code if headless

// FileSender uses a File in real life but a Blob works for testing
const blob = new Blob(['hello world!'], { type: 'text/plain' });
blob.name = 'test.txt';

describe('Upload / Download flow', function() {
  it('can only download once by default', async function() {
    const fs = new FileSender(blob);
    const file = await fs.upload();
    const fr = new FileReceiver({
      secretKey: file.toJSON().secretKey,
      id: file.id,
      nonce: file.keychain.nonce,
      requiresPassword: false
    });
    await fr.getMetadata();
    await fr.download(noSave);
    // Default download limit is 1, so a second attempt must 404.
    try {
      await fr.download(noSave);
      assert.fail('downloaded again');
    } catch (e) {
      assert.equal(e.message, '404');
    }
  });

  it('downloads with the correct password', async function() {
    const fs = new FileSender(blob);
    const file = await fs.upload();
    await file.setPassword('magic');
    const fr = new FileReceiver({
      secretKey: file.toJSON().secretKey,
      id: file.id,
      url: file.url,
      nonce: file.keychain.nonce,
      requiresPassword: true,
      password: 'magic'
    });
    await fr.getMetadata();
    await fr.download(noSave);
    assert.equal(fr.state, 'complete');
  });

  it('blocks invalid passwords from downloading', async function() {
    const fs = new FileSender(blob);
    const file = await fs.upload();
    await file.setPassword('magic');
    // Deliberately wrong password: both metadata and download must 401.
    const fr = new FileReceiver({
      secretKey: file.toJSON().secretKey,
      id: file.id,
      url: file.url,
      nonce: file.keychain.nonce,
      requiresPassword: true,
      password: 'password'
    });
    try {
      await fr.getMetadata();
      assert.fail('got metadata with bad password');
    } catch (e) {
      assert.equal(e.message, '401');
    }
    try {
      // We can't decrypt without IV from metadata
      // but let's try to download anyway
      await fr.download();
      assert.fail('downloaded file with bad password');
    } catch (e) {
      assert.equal(e.message, '401');
    }
  });

  it('retries a bad nonce', async function() {
    const fs = new FileSender(blob);
    const file = await fs.upload();
    const fr = new FileReceiver({
      secretKey: file.toJSON().secretKey,
      id: file.id,
      nonce: null, // oops
      requiresPassword: false
    });
    // Expected to succeed anyway: a fresh nonce is presumably re-parsed
    // from the 401 auth header and the request retried (see app/api.js).
    await fr.getMetadata();
    assert.equal(fr.fileInfo.name, blob.name);
  });

  it('can cancel the upload', async function() {
    const fs = new FileSender(blob);
    // Cancel at three stages (before encrypting, during encrypting,
    // during upload progress); each rejects with message '0'.
    const up = fs.upload();
    fs.cancel(); // before encrypting
    try {
      await up;
      assert.fail('not cancelled');
    } catch (e) {
      assert.equal(e.message, '0');
    }

    fs.reset();
    fs.once('encrypting', () => fs.cancel());
    try {
      await fs.upload();
      assert.fail('not cancelled');
    } catch (e) {
      assert.equal(e.message, '0');
    }

    fs.reset();
    fs.once('progress', () => fs.cancel());
    try {
      await fs.upload();
      assert.fail('not cancelled');
    } catch (e) {
      assert.equal(e.message, '0');
    }
  });

  it('can cancel the download', async function() {
    const fs = new FileSender(blob);
    const file = await fs.upload();
    const fr = new FileReceiver({
      secretKey: file.toJSON().secretKey,
      id: file.id,
      nonce: file.keychain.nonce,
      requiresPassword: false
    });
    await fr.getMetadata();
    // Cancel as soon as the first progress event fires.
    fr.once('progress', () => fr.cancel());
    try {
      await fr.download(noSave);
      assert.fail('not cancelled');
    } catch (e) {
      assert.equal(e.message, '0');
    }
  });

  it('can allow multiple downloads', async function() {
    const fs = new FileSender(blob);
    const file = await fs.upload();
    const fr = new FileReceiver({
      secretKey: file.toJSON().secretKey,
      id: file.id,
      nonce: file.keychain.nonce,
      requiresPassword: false
    });
    // Raise the limit to 2, then verify the count after each download
    // and that a third attempt 404s.
    await file.changeLimit(2);
    await fr.getMetadata();
    await fr.download(noSave);
    await file.updateDownloadCount();
    assert.equal(file.dtotal, 1);

    await fr.download(noSave);
    await file.updateDownloadCount();
    assert.equal(file.dtotal, 2);

    try {
      await fr.download(noSave);
      assert.fail('downloaded too many times');
    } catch (e) {
      assert.equal(e.message, '404');
    }
  });

  it('can delete the file before download', async function() {
    const fs = new FileSender(blob);
    const file = await fs.upload();
    const fr = new FileReceiver({
      secretKey: file.toJSON().secretKey,
      id: file.id,
      nonce: file.keychain.nonce,
      requiresPassword: false
    });
    await file.del();
    try {
      await fr.getMetadata();
      assert.fail('file still exists');
    } catch (e) {
      assert.equal(e.message, '404');
    }
  });
});

View File

@ -11,12 +11,19 @@ const regularJSOptions = {
plugins: ['yo-yoify']
};
const entry = {
vendor: ['babel-polyfill', 'fluent'],
app: ['./app/main.js'],
style: ['./app/main.css']
};
if (IS_DEV) {
entry.tests = ['./test/frontend/index.js'];
regularJSOptions.plugins.push('istanbul');
}
module.exports = {
entry: {
vendor: ['babel-polyfill', 'fluent'],
app: ['./app/main.js'],
style: ['./app/main.css']
},
entry,
output: {
filename: '[name].[chunkhash:8].js',
path: path.resolve(__dirname, 'dist'),
@ -126,6 +133,10 @@ module.exports = {
'./build/fluent_loader'
]
},
{
test: require.resolve('./test/frontend/index.js'),
use: ['babel-loader', 'val-loader']
},
{
test: require.resolve('./build/generate_asset_map.js'),
use: ['babel-loader', 'val-loader']