npm run dev now uses local file storage. npm start runs in production mode, but if either the AWS bucket or the Bitly key is not specified as an env var, it falls back to local storage.

Abhinav Adduri 2017-06-07 14:07:31 -07:00
parent 8bb42c137a
commit 1ad71904bc
3 changed files with 243 additions and 83 deletions
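For reference, a minimal sketch of how the new storage abstraction is selected and consumed. The interface names come from server/storage.js in this diff; sendFile, id, redis_client, and res are placeholders for illustration, not code from the commit.

// Sketch only -- mirrors the backend selection added in server/storage.js.
// Both config values default to the sentinel string 'localhost', so the
// S3 + Bitly path is taken only when the env is 'production' AND both
// P2P_S3_BUCKET and P2P_BITLY_KEY are overridden.
const conf = require('./config.js');
const storage = require('./storage.js'); // { length, get, set, delete, forceDelete }

const usingS3 =
  conf.env === 'production' &&
  conf.s3_bucket !== 'localhost' &&
  conf.bitly_key !== 'localhost';
console.log(usingS3 ? 'using S3 storage' : 'using local file storage');

// Hypothetical download handler showing how either backend is consumed;
// id, redis_client, and res stand in for the real request state.
function sendFile(id, redis_client, res) {
  return storage.length(id).then(contentLength => {
    res.writeHead(200, { 'Content-Length': contentLength });
    const file_stream = storage.get(id); // fs read stream or S3 object stream
    file_stream.on('close', () => {
      // remove the blob and its redis entry once the download ends
      storage.forceDelete(id, redis_client);
    });
    file_stream.pipe(res);
  });
}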


@@ -3,10 +3,12 @@ const convict = require('convict');
let conf = convict({
bitly_key: {
format: String,
default: 'localhost',
env: 'P2P_BITLY_KEY'
},
s3_bucket: {
format: String,
default: 'localhost',
env: 'P2P_S3_BUCKET'
},
redis_host: {


@@ -7,8 +7,12 @@ const crypto = require('crypto');
const conf = require('./config.js');
const stream = require('stream');
const fetch = require('node-fetch');
const storage = require('./storage.js');
let isProduction = conf.env === 'production';
let isProduction =
conf.env === 'production' &&
conf.s3_bucket !== 'localhost' &&
conf.bitly_key !== 'localhost';
const AWS = require('aws-sdk');
const s3 = new AWS.S3();
@@ -37,34 +41,28 @@ app.get('/assets/download/:id', (req, res) => {
}
redis_client.hget(id, 'filename', (err, reply) => {
// maybe some expiration logic too
if (!reply) {
res.sendStatus(404);
} else {
let params = {
Bucket: config.s3_bucket,
Key: id
};
s3.headObject(params, function(err, data) {
storage.length(id).then(contentLength => {
res.writeHead(200, {
'Content-Disposition': 'attachment; filename=' + reply,
'Content-Type': 'application/octet-stream',
'Content-Length': data.ContentLength
'Content-Length': contentLength
});
});
let file_stream = s3.getObject(params).createReadStream();
file_stream.on('finish', () => {
redis_client.del(id);
s3.deleteObject(params, function(err, data) {
let file_stream = storage.get(id);
file_stream.on('close', () => {
storage.forceDelete(id, redis_client).then(err => {
if (!err) {
console.log('Deleted off s3.');
console.log('Deleted.');
}
});
});
file_stream.pipe(res);
});
}
});
});
@@ -83,25 +81,14 @@ app.post('/delete/:id', (req, res) => {
res.sendStatus(404);
}
redis_client.hget(id, 'delete', (err, reply) => {
if (!reply || delete_token !== reply) {
res.sendStatus(404);
} else {
redis_client.del(id);
let params = {
Bucket: config.s3_bucket,
Key: id
};
s3.deleteObject(params, function(err, data) {
storage
.delete(id, redis_client, delete_token)
.then(err => {
if (!err) {
console.log('Deleted off s3.');
}
});
res.sendStatus(200);
}
});
})
.catch(err => res.sendStatus(404));
});
app.post('/upload/:id', (req, res, next) => {
@@ -113,49 +100,12 @@ app.post('/upload/:id', (req, res, next) => {
req.pipe(req.busboy);
req.busboy.on('file', (fieldname, file, filename) => {
console.log('Uploading: ' + filename);
let params = {
Bucket: config.s3_bucket,
Key: req.params.id,
Body: file
};
s3.upload(params, function(err, data) {
if (err) {
console.log(err, err.stack); // an error occurred
} else {
let id = req.params.id;
let uuid = crypto.randomBytes(10).toString('hex');
redis_client.hmset([id, 'filename', filename, 'delete', uuid]);
redis_client.expire(id, 86400000);
console.log('Upload Finished of ' + filename);
let url = `${req.protocol}://${req.get('host')}/download/${req.params.id}/`;
if (config.bitly_key) {
fetch(
'https://api-ssl.bitly.com/v3/shorten?access_token=' +
config.bitly_key +
'&longUrl=' +
encodeURIComponent(url) +
'&format=txt'
)
.then(res => {
return res.text();
})
.then(body => {
res.json({
uuid: uuid,
url: body
});
});
} else {
res.json({
uuid: uuid,
url: url
});
}
}
storage
.set(req.params.id, file, filename, redis_client, url)
.then(linkAndID => {
res.json(linkAndID);
});
});
});

server/storage.js (new file, 208 additions)

@@ -0,0 +1,208 @@
const AWS = require('aws-sdk');
const s3 = new AWS.S3();
const conf = require('./config.js');
const fs = require('fs');
const path = require('path');
const fetch = require('node-fetch');
const crypto = require('crypto');
let isProduction =
conf.env === 'production' &&
conf.s3_bucket !== 'localhost' &&
conf.bitly_key !== 'localhost';
if (isProduction) {
module.exports = {
length: AWSLength,
get: AWSGet,
set: AWSSet,
delete: AWSDelete,
forceDelete: AWSForceDelete
};
} else {
module.exports = {
length: LocalLength,
get: LocalGet,
set: LocalSet,
delete: LocalDelete,
forceDelete: LocalForceDelete
};
}
function LocalLength(id) {
return new Promise((resolve, reject) => {
try {
resolve(fs.statSync(__dirname + '/../static/' + id).size);
} catch (err) {
reject();
}
});
}
function LocalGet(id) {
return fs.createReadStream(__dirname + '/../static/' + id);
}
function LocalSet(id, file, filename, client, url) {
return new Promise((resolve, reject) => {
const fstream = fs.createWriteStream(__dirname + '/../static/' + id);
file.pipe(fstream);
fstream.on('close', () => {
let uuid = crypto.randomBytes(10).toString('hex');
client.hmset([id, 'filename', filename, 'delete', uuid]);
client.expire(id, 86400000);
console.log('Upload Finished of ' + filename);
resolve({
uuid: uuid,
url: url
});
});
});
}
function LocalDelete(id, client, delete_token) {
return new Promise((resolve, reject) => {
client.hget(id, 'delete', (err, reply) => {
if (!reply || delete_token !== reply) {
resolve(
new Promise((resolve, reject) => {
reject();
})
);
} else {
resolve(
new Promise((resolve, reject) => {
client.del(id);
resolve(fs.unlinkSync(__dirname + '/../static/' + id));
})
);
}
});
});
}
function LocalForceDelete(id, client) {
return new Promise((resolve, reject) => {
client.del(id);
resolve(fs.unlinkSync(__dirname + '/../static/' + id));
});
}
function AWSLength(id) {
let params = {
Bucket: conf.s3_bucket,
Key: id
};
return new Promise((resolve, reject) => {
s3.headObject(params, function(err, data) {
resolve(data.ContentLength);
});
});
}
function AWSGet(id) {
let params = {
Bucket: conf.s3_bucket,
Key: id
};
return s3.getObject(params).createReadStream();
}
function AWSSet(id, file, filename, client, url) {
let params = {
Bucket: conf.s3_bucket,
Key: id,
Body: file
};
return new Promise((resolve, reject) => {
s3.upload(params, function(err, data) {
if (err) {
console.log(err, err.stack); // an error occurred
} else {
let uuid = crypto.randomBytes(10).toString('hex');
client.hmset([id, 'filename', filename, 'delete', uuid]);
client.expire(id, 86400000);
console.log('Upload Finished of ' + filename);
resolve(
new Promise((resolve, reject) => {
if (conf.bitly_key) {
fetch(
'https://api-ssl.bitly.com/v3/shorten?access_token=' +
conf.bitly_key +
'&longUrl=' +
encodeURIComponent(url) +
'&format=txt'
)
.then(res => {
return res.text();
})
.then(body => {
resolve({
uuid: uuid,
url: body
});
});
} else {
resolve({
uuid: uuid,
url: url
});
}
})
);
}
});
});
}
function AWSDelete(id, client, delete_token) {
return new Promise((resolve, reject) => {
client.hget(id, 'delete', (err, reply) => {
if (!reply || delete_token !== reply) {
resolve(
new Promise((resolve, reject) => {
reject();
})
);
} else {
client.del(id);
let params = {
Bucket: conf.s3_bucket,
Key: id
};
resolve(
new Promise((resolve, reject) => {
s3.deleteObject(params, function(err, data) {
resolve(err);
});
})
);
}
});
});
}
function AWSForceDelete(id, client) {
return new Promise((resolve, reject) => {
client.del(id);
let params = {
Bucket: conf.s3_bucket,
Key: id
};
resolve(
new Promise((resolve, reject) => {
s3.deleteObject(params, function(err, data) {
resolve(err);
});
})
);
});
}