1
0
mirror of https://gitlab.com/timvisee/send.git synced 2024-11-08 20:22:45 +01:00

added fxa auth to /ws

This commit is contained in:
Danny Coates 2018-08-31 14:20:15 -07:00
parent fb7176d989
commit 85185d048c
No known key found for this signature in database
GPG Key ID: 4C442633C62E00CB
8 changed files with 89 additions and 92 deletions

View File

@ -113,17 +113,13 @@ function asyncInitWebSocket(server) {
function listenForResponse(ws, canceller) {
return new Promise((resolve, reject) => {
ws.addEventListener('message', function(msg) {
function handleMessage(msg) {
try {
const response = JSON.parse(msg.data);
if (response.error) {
throw new Error(response.error);
} else {
resolve({
url: response.url,
id: response.id,
ownerToken: response.owner
});
resolve(response);
}
} catch (e) {
ws.close();
@ -131,7 +127,8 @@ function listenForResponse(ws, canceller) {
canceller.error = e;
reject(e);
}
});
}
ws.addEventListener('message', handleMessage, { once: true });
});
}
@ -140,6 +137,7 @@ async function upload(
metadata,
verifierB64,
timeLimit,
dlimit,
bearerToken,
onprogress,
canceller
@ -160,12 +158,14 @@ async function upload(
fileMetadata: metadataHeader,
authorization: `send-v1 ${verifierB64}`,
bearer: bearerToken,
timeLimit
timeLimit,
dlimit
};
const responsePromise = listenForResponse(ws, canceller);
const uploadInfoResponse = listenForResponse(ws, canceller);
ws.send(JSON.stringify(fileMeta));
const uploadInfo = await uploadInfoResponse;
const completedResponse = listenForResponse(ws, canceller);
const reader = stream.getReader();
let state = await reader.read();
@ -188,9 +188,9 @@ async function upload(
const footer = new Uint8Array([0]);
ws.send(footer);
const response = await responsePromise; //promise only fulfills if response is good
await completedResponse;
ws.close();
return response;
return uploadInfo;
} catch (e) {
ws.close(4000);
throw e;
@ -202,6 +202,7 @@ export function uploadWs(
metadata,
verifierB64,
timeLimit,
dlimit,
bearerToken,
onprogress
) {
@ -218,6 +219,7 @@ export function uploadWs(
metadata,
verifierB64,
timeLimit,
dlimit,
bearerToken,
onprogress,
canceller
@ -244,7 +246,6 @@ async function downloadS(id, keychain, signal) {
if (response.status !== 200) {
throw new Error(response.status);
}
//const fileSize = response.headers.get('Content-Length');
return response.body;
}

View File

@ -52,7 +52,7 @@ export default function(state, emitter) {
emitter.on('logout', () => {
state.user.logout();
render();
emitter.emit('pushState', '/');
});
emitter.on('changeLimit', async ({ file, value }) => {
@ -107,7 +107,7 @@ export default function(state, emitter) {
render();
});
emitter.on('upload', async ({ type, dlCount, password }) => {
emitter.on('upload', async ({ type, dlimit, password }) => {
if (!state.archive) return;
if (state.storage.files.length >= LIMITS.MAX_ARCHIVES_PER_USER) {
return alert(
@ -118,11 +118,7 @@ export default function(state, emitter) {
}
const size = state.archive.size;
if (!state.timeLimit) state.timeLimit = DEFAULTS.EXPIRE_SECONDS;
const sender = new FileSender(
state.archive,
state.timeLimit,
state.user.bearerToken
);
const sender = new FileSender();
sender.on('progress', updateProgress);
sender.on('encrypting', render);
@ -136,17 +132,21 @@ export default function(state, emitter) {
try {
metrics.startedUpload({ size, type });
const ownedFile = await sender.upload();
const ownedFile = await sender.upload(
state.archive,
state.timeLimit,
dlimit,
state.user.bearerToken
);
ownedFile.type = type;
state.storage.totalUploads += 1;
metrics.completedUpload(ownedFile);
state.storage.addFile(ownedFile);
// TODO integrate password and limit into /upload request
// TODO integrate password into /upload request
if (password) {
emitter.emit('password', { password, file: ownedFile });
}
emitter.emit('changeLimit', { file: ownedFile, value: dlCount });
const cancelBtn = document.getElementById('cancel-upload');
if (cancelBtn) {

View File

@ -7,11 +7,8 @@ import { uploadWs } from './api';
import { encryptedSize } from './ece';
export default class FileSender extends Nanobus {
constructor(file, timeLimit, bearerToken) {
constructor() {
super('FileSender');
this.timeLimit = timeLimit || DEFAULTS.EXPIRE_SECONDS;
this.bearerToken = bearerToken;
this.file = file;
this.keychain = new Keychain();
this.reset();
}
@ -45,39 +42,30 @@ export default class FileSender extends Nanobus {
}
}
readFile() {
return new Promise((resolve, reject) => {
const reader = new FileReader();
reader.readAsArrayBuffer(this.file);
// TODO: progress?
reader.onload = function(event) {
const plaintext = new Uint8Array(this.result);
resolve(plaintext);
};
reader.onerror = function(err) {
reject(err);
};
});
}
async upload() {
async upload(
file,
timeLimit = DEFAULTS.EXPIRE_SECONDS,
dlimit = 1,
bearerToken
) {
const start = Date.now();
if (this.cancelled) {
throw new Error(0);
}
this.msg = 'encryptingFile';
this.emit('encrypting');
const totalSize = encryptedSize(this.file.size);
const encStream = await this.keychain.encryptStream(this.file.stream);
const metadata = await this.keychain.encryptMetadata(this.file);
const totalSize = encryptedSize(file.size);
const encStream = await this.keychain.encryptStream(file.stream);
const metadata = await this.keychain.encryptMetadata(file);
const authKeyB64 = await this.keychain.authKeyB64();
this.uploadRequest = uploadWs(
encStream,
metadata,
authKeyB64,
this.timeLimit,
this.bearerToken,
timeLimit,
dlimit,
bearerToken,
p => {
this.progress = [p, totalSize];
this.emit('progress');
@ -100,17 +88,17 @@ export default class FileSender extends Nanobus {
const ownedFile = new OwnedFile({
id: result.id,
url: `${result.url}#${secretKey}`,
name: this.file.name,
size: this.file.size,
manifest: this.file.manifest,
name: file.name,
size: file.size,
manifest: file.manifest,
time: time,
speed: this.file.size / (time / 1000),
speed: file.size / (time / 1000),
createdAt: Date.now(),
expiresAt: Date.now() + this.timeLimit * 1000,
expiresAt: Date.now() + timeLimit * 1000,
secretKey: secretKey,
nonce: this.keychain.nonce,
ownerToken: result.ownerToken,
timeLimit: this.timeLimit
timeLimit: timeLimit
});
return ownedFile;

View File

@ -129,7 +129,7 @@ module.exports = function(state, emit) {
emit('upload', {
type: 'click',
dlCount: state.downloadCount || 1,
dlimit: state.downloadCount || 1,
password: state.password
});
}

View File

@ -5,7 +5,7 @@ const mozlog = require('../log');
const Limiter = require('../limiter');
const Parser = require('../streamparser');
const wsStream = require('websocket-stream/stream');
// const fxa = require('./fxa');
const fxa = require('./fxa');
const log = mozlog('send.upload');
@ -24,22 +24,27 @@ module.exports = function(ws, req) {
const owner = crypto.randomBytes(10).toString('hex');
const fileInfo = JSON.parse(message);
const timeLimit = fileInfo.timeLimit;
const timeLimit = fileInfo.timeLimit || config.default_expire_seconds;
const dlimit = fileInfo.dlimit || 1;
const metadata = fileInfo.fileMetadata;
const auth = fileInfo.authorization;
const user = '1'; //await fxa.verify(fileInfo.bearer); // TODO
const user = await fxa.verify(fileInfo.bearer);
const maxFileSize = user
? config.max_file_size
: config.anon_max_file_size;
const maxExpireSeconds = user
? config.max_expire_seconds
: config.anon_max_expire_seconds;
const maxDownloads = user
? config.max_downloads
: config.anon_max_downloads;
if (
!metadata ||
!auth ||
timeLimit <= 0 ||
timeLimit > maxExpireSeconds
timeLimit > maxExpireSeconds ||
dlimit > maxDownloads
) {
ws.send(
JSON.stringify({
@ -52,6 +57,7 @@ module.exports = function(ws, req) {
const meta = {
owner,
metadata,
dlimit,
auth: auth.split(' ')[1],
nonce: crypto.randomBytes(16).toString('base64')
};
@ -59,6 +65,14 @@ module.exports = function(ws, req) {
const protocol = config.env === 'production' ? 'https' : req.protocol;
const url = `${protocol}://${req.get('host')}/download/${newId}/`;
ws.send(
JSON.stringify({
url,
ownerToken: meta.owner,
id: newId
})
);
const limiter = new Limiter(maxFileSize);
const parser = new Parser();
fileStream = wsStream(ws, { binary: true })
@ -74,14 +88,7 @@ module.exports = function(ws, req) {
// TODO: we should handle cancelled uploads differently
// in order to avoid having to check socket state and clean
// up storage, possibly with an exception that we can catch.
ws.send(
JSON.stringify({
url,
owner: meta.owner,
id: newId,
authentication: `send-v1 ${meta.nonce}`
})
);
ws.send(JSON.stringify({ ok: true }));
}
} catch (e) {
log.error('upload', e);

View File

@ -24,6 +24,7 @@ describe('API', function() {
meta,
verifierB64,
DEFAULTS.EXPIRE_SECONDS,
1,
null,
p
);

View File

@ -10,8 +10,8 @@ const archive = new Archive([blob]);
describe('FileSender', function() {
describe('upload', function() {
it('returns an OwnedFile on success', async function() {
const fs = new FileSender(archive);
const file = await fs.upload();
const fs = new FileSender();
const file = await fs.upload(archive);
assert.ok(file.id);
assert.equal(file.name, archive.name);
});

View File

@ -18,8 +18,8 @@ navigator.serviceWorker.register('/serviceWorker.js');
describe('Upload / Download flow', function() {
this.timeout(0);
it('can only download once by default', async function() {
const fs = new FileSender(archive);
const file = await fs.upload();
const fs = new FileSender();
const file = await fs.upload(archive);
const fr = new FileReceiver({
secretKey: file.toJSON().secretKey,
id: file.id,
@ -38,8 +38,8 @@ describe('Upload / Download flow', function() {
});
it('downloads with the correct password', async function() {
const fs = new FileSender(archive);
const file = await fs.upload();
const fs = new FileSender();
const file = await fs.upload(archive);
await file.setPassword('magic');
const fr = new FileReceiver({
secretKey: file.toJSON().secretKey,
@ -55,8 +55,8 @@ describe('Upload / Download flow', function() {
});
it('blocks invalid passwords from downloading', async function() {
const fs = new FileSender(archive);
const file = await fs.upload();
const fs = new FileSender();
const file = await fs.upload(archive);
await file.setPassword('magic');
const fr = new FileReceiver({
secretKey: file.toJSON().secretKey,
@ -83,8 +83,8 @@ describe('Upload / Download flow', function() {
});
it('retries a bad nonce', async function() {
const fs = new FileSender(archive);
const file = await fs.upload();
const fs = new FileSender();
const file = await fs.upload(archive);
const fr = new FileReceiver({
secretKey: file.toJSON().secretKey,
id: file.id,
@ -96,8 +96,8 @@ describe('Upload / Download flow', function() {
});
it('can cancel the upload', async function() {
const fs = new FileSender(archive);
const up = fs.upload();
const fs = new FileSender();
const up = fs.upload(archive);
fs.cancel(); // before encrypting
try {
await up;
@ -108,7 +108,7 @@ describe('Upload / Download flow', function() {
fs.reset();
fs.once('encrypting', () => fs.cancel());
try {
await fs.upload();
await fs.upload(archive);
assert.fail('not cancelled 2');
} catch (e) {
assert.equal(e.message, '0');
@ -116,7 +116,7 @@ describe('Upload / Download flow', function() {
fs.reset();
fs.once('progress', () => fs.cancel());
try {
await fs.upload();
await fs.upload(archive);
assert.fail('not cancelled 3');
} catch (e) {
assert.equal(e.message, '0');
@ -124,8 +124,8 @@ describe('Upload / Download flow', function() {
});
it('can cancel the download', async function() {
const fs = new FileSender(archive);
const file = await fs.upload();
const fs = new FileSender();
const file = await fs.upload(archive);
const fr = new FileReceiver({
secretKey: file.toJSON().secretKey,
id: file.id,
@ -144,8 +144,8 @@ describe('Upload / Download flow', function() {
it('can increase download count on download', async function() {
this.timeout(0);
const fs = new FileSender(archive);
const file = await fs.upload();
const fs = new FileSender();
const file = await fs.upload(archive);
const fr = new FileReceiver({
secretKey: file.toJSON().secretKey,
id: file.id,
@ -159,8 +159,8 @@ describe('Upload / Download flow', function() {
});
it('does not increase download count when download cancelled', async function() {
const fs = new FileSender(archive);
const file = await fs.upload();
const fs = new FileSender();
const file = await fs.upload(archive);
const fr = new FileReceiver({
secretKey: file.toJSON().secretKey,
id: file.id,
@ -180,8 +180,8 @@ describe('Upload / Download flow', function() {
});
it('can allow multiple downloads', async function() {
const fs = new FileSender(archive);
const file = await fs.upload();
const fs = new FileSender();
const file = await fs.upload(archive);
const fr = new FileReceiver({
secretKey: file.toJSON().secretKey,
id: file.id,
@ -206,8 +206,8 @@ describe('Upload / Download flow', function() {
});
it('can delete the file before download', async function() {
const fs = new FileSender(archive);
const file = await fs.upload();
const fs = new FileSender();
const file = await fs.upload(archive);
const fr = new FileReceiver({
secretKey: file.toJSON().secretKey,
id: file.id,