diff --git a/.idea/dictionaries/VLE2FE.xml b/.idea/dictionaries/VLE2FE.xml
index 5337928..2d3f5ec 100644
--- a/.idea/dictionaries/VLE2FE.xml
+++ b/.idea/dictionaries/VLE2FE.xml
@@ -1,12 +1,46 @@
+ akro
+ amodel
+ anwendungsbeschränkt
+ batchgranulate
bcrypt
+ bnpd
cfenv
+ colordesignatiomsuppl
+ colordesignationsuppl
+ contentin
dfopdb
+ dosiergeschw
+ dpts
+ einspritzgeschw
+ frameguard
+ functionlink
+ glassfibrecontent
janedoe
+ johnnydoe
+ kfingew
+ latamid
+ lati
+ lyucy
+ materialnumber
pagesize
+ pnach
+ preaged
+ reinforcementmaterial
+ reinforcingmaterial
+ samplenumber
+ sdpt
+ signalviolet
+ solvay
+ spaceless
+ stabwn
+ stanyl
+ stringin
testcomment
+ ultramid
+ vorgealtert
\ No newline at end of file
diff --git a/.idea/inspectionProfiles/Project_Default.xml b/.idea/inspectionProfiles/Project_Default.xml
index 7e46df7..285ddb5 100644
--- a/.idea/inspectionProfiles/Project_Default.xml
+++ b/.idea/inspectionProfiles/Project_Default.xml
@@ -2,6 +2,7 @@
+
\ No newline at end of file
diff --git a/api/api.yaml b/api/api.yaml
index a1966fa..7089f4a 100644
--- a/api/api.yaml
+++ b/api/api.yaml
@@ -34,8 +34,9 @@ info:
0: newly added/changed
10: validated
- Bitbucket repository
-# TODO: Link to new documentation page
+ Bitbucket repository API
+ Bitbucket repository UI
+ Documentation page
servers:
diff --git a/api/root.yaml b/api/root.yaml
index af618a7..6727c17 100644
--- a/api/root.yaml
+++ b/api/root.yaml
@@ -40,6 +40,8 @@
level:
type: string
example: read
+ user_id:
+ $ref: 'api.yaml#/components/schemas/Id'
401:
$ref: 'api.yaml#/components/responses/401'
500:
diff --git a/data_import/import.js b/data_import/import.js
index 0f8d867..4c5d031 100644
--- a/data_import/import.js
+++ b/data_import/import.js
@@ -49,6 +49,8 @@ let commentsLog = [];
let customFieldsLog = [];
const vzValues = {}; // vz values from comments
const dptLog = [];
+const dptSampleAddLog = []; // log samples created during dpt insertion
+const typeLog = [];
// TODO: conditions
@@ -75,6 +77,7 @@ async function main() {
}
// write logs
fs.writeFileSync('./data_import/comments.txt', commentsLog.join('\r\n'));
+ fs.writeFileSync('./data_import/typeLog.txt', typeLog.join('\r\n'));
fs.writeFileSync('./data_import/customFields.txt', customFieldsLog.join('\r\n'));
fs.writeFileSync('./data_import/sampleReferences.txt', sampleReferences.map(e => JSON.stringify(e)).join('\r\n'));
fs.writeFileSync('./data_import/sampleReferences.json', JSON.stringify(sampleReferences));
@@ -84,6 +87,7 @@ async function main() {
if (stages.dpt) { // DPT
await allDpts();
fs.writeFileSync('./data_import/sdptLog.txt', dptLog.join('\r\n'));
+ fs.writeFileSync('./data_import/dptSampleAddLog.txt', dptSampleAddLog.join('\r\n'));
}
if (0) { // pdf test
console.log(await readPdf('N28_BN05-OX023_2019-07-16.pdf'));
@@ -95,24 +99,23 @@ async function main() {
}
async function importCsv(doc) {
- // Uniform name samplenumber materialnumber materialname supplier material plastic reinforcingmaterial granulate/part color charge/batch comments vz(ml/g) kfingew% degradation(%) glassfibrecontent(%) stabwn
- // Metadata__AnP2.csv Sample number,Material number,Material name,Supplier,Material,Plastic,Reinforcing material, granulate/Part,Color,Charge/ Batch, Comments
- // Metadata__AnP2_A.csv Sample number,Material number,Material name,Supplier, Plastic,Reinforcing material, Granulate/Part, Comments, Humidity [ppm]
- // Metadata__AnP2_B.csv Sample number,Material number,Material name,Supplier, Plastic,Reinforcing material, Granulate/Part, VZ [ml/g], glass fibre content
- // Metadata_Ap.csv Sample number,Material number,Material name,Supplier, Plastic,Reinforcing material, Granulate/Part,Color,Charge/Batch, Comments
- // Metadata_Bj.csv Sample number,Material number,Material name,Supplier,Material,Plastic,Reinforcing material, Granulate/Part,Color,Charge/batch granulate/part,Comments
- // Metadata_Eh.csv Sample number,Material number,Material name,Supplier,Material, Reinforcing material, Granulate/Part,Color,Charge/Batch granulate/part,Comments, VZ [cm³/g], Spalte1
- // Metadata_Eh_B.csv Sample number, Material name,Supplier, Plastic,Reinforcing material, Granulate/Part,Color, Comments, VZ [cm³/g]
- // Metadata_Eh_Duroplasten.csv Sample number,Material number,Material name,Supplier,Material, Reinforcing material, Granulate/Part,Color,Charge/Batch granulate/part,Comments
- // Metadata_Rng_aktuell.csv Sample number,Material number,Material name,Supplier,Material,Plastic,Reinforcing material, Granulate/Part,Color,Charge/batch granulate/part,Comments, VZ (ml/g), Degradation(%),Glas fibre content (%)
- // Metadata_Rng_aktuell_A.csv Sample number,Material number,Material name,Supplier,Material,Plastic,Reinforcing material, Granulate/Part,Farbe,Charge/batch granulate/part,Comments, KF in Gew%, Stabwn
- // Metadata_Rng_aktuell_B.csv Sample number, Material name,Supplier, Plastic,Reinforcing material (content in %),Granulate/Part, Comments, VZ (ml/g), Degradation (%), Alterungszeit in h
- // Metadata_WaP.csv Probennummer, Name, Firma, Material, Teil/Rohstoff, Charge, Anmerkung,VZ (ml/g), Abbau (%), Verstärkungsstoffgehalt (%), Versuchsnummer
+ // Uniform name samplenumber materialnumber materialname supplier material plastic reinforcingmaterial granulate/part color charge/batch comments vz(ml/g) kfingew% degradation(%) reinforcingmaterialcontent stabwn
+ // Metadata__AnP2.csv Sample number,Material number,Material name,Supplier,Material,Plastic,Reinforcing material,granulate/Part,Color,Charge/ Batch, Comments
+ // Metadata__AnP2_A.csv Sample number,Material number,Material name,Supplier, Plastic,Reinforcing material,Granulate/Part, Comments, Humidity [ppm]
+ // Metadata__AnP2_B.csv Sample number,Material number,Material name,Supplier, Plastic,Reinforcing material,Granulate/Part, VZ [ml/g], glass fibre content
+ // Metadata_Ap.csv Sample number,Material number,Material name,Supplier, Plastic,Reinforcing material,Granulate/Part,Color,Charge/Batch, Comments
+ // Metadata_Bj.csv Sample number,Material number,Material name,Supplier,Material,Plastic,Reinforcing material,Granulate/Part,Color,Charge/batch granulate/part,Comments
+ // Metadata_Eh.csv Sample number,Material number,Material name,Supplier,Material, Reinforcing material,Granulate/Part,Color,Charge/Batch granulate/part,Comments, VZ [cm³/g], Spalte1
+ // Metadata_Eh_B.csv Sample number, Material name,Supplier, Plastic,Reinforcing material,Granulate/Part,Color, Comments, VZ [cm³/g]
+ // Metadata_Eh_Duroplasten.csv Sample number,Material number,Material name,Supplier,Material, Reinforcing material,Granulate/Part,Color,Charge/Batch granulate/part,Comments
+ // Metadata_Rng_aktuell.csv Sample number,Material number,Material name,Supplier,Material,Plastic,Reinforcing material,Granulate/Part,Color,Charge/batch granulate/part,Comments, VZ (ml/g), Degradation(%),Glas fibre content (%)
+ // Metadata_Rng_aktuell_A.csv Sample number,Material number,Material name,Supplier,Material,Plastic,Reinforcing material,Granulate/Part,Farbe,Charge/batch granulate/part,Comments, KF in Gew%, Reinforcing material (content in %),Stabwn
+ // Metadata_Rng_aktuell_B.csv Sample number, Material name,Supplier, Plastic, Granulate/Part, Comments, VZ (ml/g), Degradation (%), Alterungszeit in h
+ // Metadata_WaP.csv Probennummer, Name, Firma, Material, Teil/Rohstoff, Charge, Anmerkung,VZ (ml/g), Abbau (%), Verstärkungsstoffgehalt (%), Versuchsnummer
const nameCorrection = { // map to right column names
'probennummer': 'samplenumber',
'name': 'materialname',
'firma': 'supplier',
- 'reinforcingmaterial(contentin%)': 'reinforcingmaterial',
'teil/rohstoff': 'granulate/part',
'charge/batchgranulate/part': 'charge/batch',
'charge': 'charge/batch',
@@ -120,7 +123,10 @@ async function importCsv(doc) {
'vz[ml/g]': 'vz(ml/g)',
'vz[cm³/g]': 'vz(ml/g)',
'abbau(%)': 'degradation(%)',
- 'verstärkungsstoffgehalt(%)': 'glassfibrecontent(%)'
+ 'glassfibrecontent': 'reinforcingmaterialcontent',
+ 'glasfibrecontent(%)': 'reinforcingmaterialcontent',
+ 'reinforcingmaterial(contentin%)': 'reinforcingmaterialcontent',
+ 'verstärkungsstoffgehalt(%)': 'reinforcingmaterialcontent'
};
const missingFieldsFill = [ // column names to fill if they do not exist
'color',
@@ -129,7 +135,7 @@ async function importCsv(doc) {
'materialnumber',
'reinforcementmaterial'
]
- console.log('importing ' + doc);
+ console.info('importing ' + doc);
data = [];
await new Promise(resolve => {
fs.createReadStream(doc)
@@ -158,9 +164,9 @@ async function importCsv(doc) {
newE[field] = '';
}
});
- // if(newE['materialname'] === '') { // TODO: is this replacement okay?
- // newE['materialname'] = newE['material'];
- // }
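+ // fall back to the material column when the material name is empty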
+ if(newE['materialname'] === '') {
+ newE['materialname'] = newE['material'];
+ }
if (newE['supplier'] === '') { // empty supplier fields
newE['supplier'] = 'unknown';
}
@@ -211,12 +217,68 @@ async function allDpts() {
res.data.forEach(sample => {
sampleIds[sample.number] = sample._id;
});
- const dptRegex = /(.*?)_(.*?)_(\d+|[a-zA-Z0-9]+_\d+).DPT/;
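+ // match spectrum file names of the form <device>_<sample number>_<counter>.DPT (also .csv/.CSV, optional _JDX part)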
+ const dptRegex = /(.*?)_(.*?)_(\d+|[a-zA-Z0-9]+[_.]\d+)(_JDX)?[.]{1,2}(DPT|csv|CSV)/;
const dpts = fs.readdirSync(dptFiles);
for (let i in dpts) {
- const regexRes = dptRegex.exec(dpts[i])
+ let regexInput;
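+ // names matching "Bj..._JDX.DPT" are rewritten into the common <device>_<number>_<counter>.DPT form before matching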
+ const bjRes = /^(Bj[FT]?)\s?([a-z0-9_]*)_JDX.DPT/.exec(dpts[i]);
+ if (bjRes) {
+ regexInput = `Bj01_${bjRes[1]}${bjRes[2]}_0.DPT`;
+ }
+ else {
+ regexInput = dpts[i].replace(/_JDX.*\./, '.');
+ }
+ const regexRes = dptRegex.exec(regexInput);
+ if (regexRes && !sampleIds[regexRes[2]]) { // when sample number includes an additional _x instead of having _x_x for spectrum description
+ regexRes[2] = `${regexRes[2]}_${regexRes[3].split('_')[0]}`;
+ }
+ if (regexRes && !sampleIds[regexRes[2]] && sampleIds[regexRes[2].split('_')[0]]) { // when number_abx does not exist but the base number does
+ dptSampleAddLog.push(`Trying to find ${regexRes[2].split('_')[0]}`);
+ dptSampleAddLog.push(host + '/sample/' + sampleIds[regexRes[2].split('_')[0]]);
+ res = await axios({
+ method: 'get',
+ url: host + '/sample/' + sampleIds[regexRes[2].split('_')[0]],
+ auth: {
+ username: 'admin',
+ password: 'Abc123!#'
+ }
+ }).catch(err => {
+ if (err.response) {
+ console.error(err.response.data);
+ errors.push(`DPT Could not fetch sample ${regexRes[2].split('_')[0]}: ${err.response.data}`);
+ }
+ });
+ if (res.data) {
+ dptSampleAddLog.push(JSON.stringify(res.data));
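+ // create the missing sample by copying color, type, batch and material from the base sample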
+ const data = _.merge(_.pick(res.data, ['color', 'type', 'batch', 'material_id']), {number: regexRes[2], condition: {}, notes: {}});
+ res = await axios({
+ method: 'post',
+ url: host + '/sample/new',
+ auth: {
+ username: res.data.user,
+ password: res.data.user === 'admin' ? 'Abc123!#' : '2020DeFinMachen!'
+ },
+ data
+ }).catch(err => {
+ if (err.response) {
+ console.error(err.response.data);
+ errors.push(`DPT Could not save sample ${JSON.stringify(data)}: ${err.response.data}`);
+ }
+ });
+ if (res.data) {
+ dptSampleAddLog.push(`${regexRes[2]} from ${regexRes[2].split('_')[0]}`)
+ sampleIds[regexRes[2]] = res.data._id;
+ }
+ else {
+ console.error(res);
+ console.error(data);
+ }
+ }
+ }
if (regexRes && sampleIds[regexRes[2]]) { // found matching sample
- console.log(`${dpts[i]} -> ${regexRes[2]}`);
+ console.log(`${i}/${dpts.length} ${dpts[i]} -> ${regexRes[2]}`);
dptLog.push(`${dpts[i]}, ${regexRes[2]}`);
const f = fs.readFileSync(dptFiles + '\\' + dpts[i], 'utf-8');
const data = {
@@ -225,10 +287,10 @@ async function allDpts() {
measurement_template
};
data.values.device = regexRes[1];
- data.values.dpt = f.split('\r\n').map(e => e.split(','));
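+ // parse each DPT line into numeric values instead of keeping the raw strings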
+ data.values.dpt = f.split('\r\n').map(e => e.split(',').map(v => parseFloat(v)));
let rescale = false;
for (let i in data.values.dpt) {
- if (data.values.dpt[i][1] > 2) {
+ if (data.values.dpt[i][1] > 10) {
rescale = true;
break;
}
@@ -258,7 +320,12 @@ async function allDpts() {
}
else {
console.log(`Could not find sample for ${dpts[i]}`);
- errors.push(`Could not find sample for ${dpts[i]}`);
+ if (regexRes) {
+ errors.push(`Could not find sample for ${dpts[i]}; [DEBUG] ${regexRes[2]}, ${!sampleIds[regexRes[2]]}, ${sampleIds[regexRes[2].split('_')[0]]}`);
+ }
+ else {
+ errors.push(`Could not find sample for ${dpts[i]} (did not match RegEx)`);
+ }
}
}
}
@@ -274,6 +341,7 @@ async function allKfVz() {
});
const kf_template = res.data.filter(e => e.name === 'kf').sort((a, b) => b.version - a.version)[0]._id;
const vz_template = res.data.filter(e => e.name === 'vz').sort((a, b) => b.version - a.version)[0]._id;
+ const rmc_template = res.data.filter(e => e.name === 'reinforcement material content').sort((a, b) => b.version - a.version)[0]._id;
res = await axios({
method: 'get',
url: host + '/samples?status=all',
@@ -289,6 +357,7 @@ async function allKfVz() {
for (let index in data) {
console.info(`KF/VZ ${index}/${data.length}`);
let sample = data[index];
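+ // strip a leading prefix like "Bj01_" from the sample number before lookup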
+ sample['samplenumber'] = sample['samplenumber'].replace(/[A-Z][a-z]0\d_/, '');
let credentials = ['admin', 'Abc123!#'];
if (sampleDevices[sample['samplenumber']]) {
credentials = [sampleDevices[sample['samplenumber']], '2020DeFinMachen!']
@@ -339,6 +408,27 @@ async function allKfVz() {
errors.push(`KF/VZ upload for ${JSON.stringify(sample)} failed: ${JSON.stringify(err.response.data)}`);
});
}
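+ // additionally store the reinforcement material content as its own measurement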
+ if (sample['reinforcingmaterialcontent']) {
+ await axios({
+ method: 'post',
+ url: host + '/measurement/new',
+ auth: {
+ username: credentials[0],
+ password: credentials[1]
+ },
+ data: {
+ sample_id: sampleIds[sample['samplenumber']],
+ measurement_template: rmc_template,
+ values: {
+ percentage: Number(sample['reinforcingmaterialcontent'].replace('%', '').replace(',', '.'))
+ }
+ }
+ }).catch(err => {
+ console.log(sample['samplenumber']);
+ console.error(err.response.data);
+ errors.push(`KF/VZ upload for ${JSON.stringify(sample)} failed: ${JSON.stringify(err.response.data)}`);
+ });
+ }
}
}
@@ -436,7 +526,6 @@ async function saveSamples() {
console.info(`SAMPLE SAVE ${i}/${samples.length}`);
let credentials = ['admin', 'Abc123!#'];
if (sampleDevices[samples[i].number]) {
- console.log(sampleDevices[samples[i].number]);
credentials = [sampleDevices[samples[i].number], '2020DeFinMachen!']
}
await axios({
@@ -520,7 +609,7 @@ async function allMaterials() {
password: 'Abc123!#'
}
});
- const materialTemplate = res.data.find(e => e.name === 'plastic')._id;
+ const materialTemplate = res.data.filter(e => e.name === 'plastic').sort((a, b) => b.version - a.version)[0]._id;
// process all samples
for (let index in data) {
@@ -718,7 +807,7 @@ function readPdf(file) {
let lastLastText = ''; // text of last last item
await new pdfReader.PdfReader().parseFileItems(nmDocs + '\\' + file, (err, item) => {
if (item && item.text) {
- if ((stripSpaces(lastLastText + lastText + item.text).toLowerCase().indexOf('colordesignationsuppl') >= 0) || (stripSpaces(lastLastText + lastText + item.text).toLowerCase().indexOf('colordesignatiomsupplier') >= 0)) { // table area starts
+ if ((stripSpaces(lastLastText + lastText + item.text).toLowerCase().indexOf('colordesignationsuppl') >= 0) || (stripSpaces(lastLastText + lastText + item.text).toLowerCase().indexOf('colordesignatiomsuppl') >= 0)) { // table area starts
table = countdown;
}
if (table > 0) {
@@ -865,7 +954,10 @@ function customFields (comment, sampleNumber) {
function sampleType (type) {
const allowedTypes = ['tension rod', 'part', 'granulate'];
- return allowedTypes.indexOf(type) >= 0 ? type : (type === '' ? 'unknown' : 'other');
+ if (allowedTypes.indexOf(type) < 0) {
+ typeLog.push(type);
+ }
+ return allowedTypes.indexOf(type) >= 0 ? type : 'part';
}
function stripSpaces(s) {
diff --git a/package-lock.json b/package-lock.json
index 6749f0a..eb753e9 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -2522,11 +2522,6 @@
}
}
},
- "mongo-sanitize": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/mongo-sanitize/-/mongo-sanitize-1.1.0.tgz",
- "integrity": "sha512-6gB9AiJD+om2eZLxaPKIP5Q8P3Fr+s+17rVWso7hU0+MAzmIvIMlgTYuyvalDLTtE/p0gczcvJ8A3pbN1XmQ/A=="
- },
"mongodb": {
"version": "3.4.1",
"resolved": "https://registry.npmjs.org/mongodb/-/mongodb-3.4.1.tgz",
diff --git a/package.json b/package.json
index 7bf20ea..e42f017 100644
--- a/package.json
+++ b/package.json
@@ -11,6 +11,7 @@
"test": "mocha dist/**/**.spec.js",
"start": "node index.js",
"dev": "nodemon -e ts,yaml --exec \"tsc && node dist/index.js || exit 1\"",
+ "start-local": "node dist/index.js",
"loadDev": "node dist/test/loadDev.js",
"coverage": "tsc && nyc --reporter=html --reporter=text mocha dist/**/**.spec.js --timeout 5000",
"import": "node data_import/import.js"
@@ -35,7 +36,6 @@
"json-schema": "^0.2.5",
"json2csv": "^5.0.1",
"lodash": "^4.17.15",
- "mongo-sanitize": "^1.1.0",
"mongoose": "^5.8.7",
"swagger-ui-dist": "^3.30.2"
},
diff --git a/src/db.ts b/src/db.ts
index 2bab005..8f72f81 100644
--- a/src/db.ts
+++ b/src/db.ts
@@ -7,7 +7,7 @@ import ChangelogModel from './models/changelog';
// database urls, prod db url is retrieved automatically
const TESTING_URL = 'mongodb://localhost/dfopdb_test';
const DEV_URL = 'mongodb://localhost/dfopdb';
-const debugging = true;
+const debugging = false;
if (process.env.NODE_ENV !== 'production' && debugging) {
mongoose.set('debug', true); // enable mongoose debug
@@ -114,6 +114,9 @@ export default class db {
Object.keys(json.collections).forEach(collectionName => { // create each collection
json.collections[collectionName] = this.oidResolve(json.collections[collectionName]);
this.state.db.createCollection(collectionName, (err, collection) => {
+ if (err) {
+ console.error(err);
+ }
collection.insertMany(json.collections[collectionName], () => { // insert JSON data
if (++ loadCounter >= Object.keys(json.collections).length) { // all collections loaded
done();
diff --git a/src/index.ts b/src/index.ts
index 3fc4ef8..54f3c68 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -2,7 +2,6 @@ import express from 'express';
import bodyParser from 'body-parser';
import compression from 'compression';
import contentFilter from 'content-filter';
-import mongoSanitize from 'mongo-sanitize';
import helmet from 'helmet';
import cors from 'cors';
import api from './api';
@@ -11,7 +10,8 @@ import db from './db';
// TODO: check header, also in UI
// tell if server is running in debug or production environment
-console.info(process.env.NODE_ENV === 'production' ? '===== PRODUCTION =====' : process.env.NODE_ENV === 'test' ? '' :'===== DEVELOPMENT =====');
+console.info(process.env.NODE_ENV === 'production' ?
+ '===== PRODUCTION =====' : process.env.NODE_ENV === 'test' ? '' :'===== DEVELOPMENT =====');
// mongodb connection
@@ -61,15 +61,15 @@ app.use('/static/img/bosch-logo.svg', helmet.contentSecurityPolicy({
}));
// middleware
-app.use(contentFilter()); // filter URL query attacks
+app.use(compression()); // compress responses
app.use(express.json({ limit: '5mb'}));
app.use(express.urlencoded({ extended: false, limit: '5mb' }));
-app.use(compression()); // compress responses
app.use(bodyParser.json());
-app.use((req, res, next) => { // filter body query attacks
- req.body = mongoSanitize(req.body);
- next();
-});
+const injectionBlackList = ['$', '{', '&&', '||'];
+app.use(contentFilter({
+ urlBlackList: injectionBlackList,
+ bodyBlackList: injectionBlackList
+})); // filter NoSQL injection attempts in URL and body
app.use((err, req, res, ignore) => { // bodyParser error handling
res.status(400).send({status: 'Invalid JSON body'});
});
diff --git a/src/routes/root.spec.ts b/src/routes/root.spec.ts
index b84d0c2..8ce7564 100644
--- a/src/routes/root.spec.ts
+++ b/src/routes/root.spec.ts
@@ -179,7 +179,7 @@ describe('/', () => {
url: '/authorized',
auth: {key: 'admin'},
httpStatus: 200,
- res: {status: 'Authorization successful', method: 'key', level: 'admin'}
+ res: {status: 'Authorization successful', method: 'key', level: 'admin', user_id: '000000000000000000000003'}
});
});
it('works with basic auth', done => {
@@ -188,7 +188,7 @@ describe('/', () => {
url: '/authorized',
auth: {basic: 'admin'},
httpStatus: 200,
- res: {status: 'Authorization successful', method: 'basic', level: 'admin'}
+ res: {status: 'Authorization successful', method: 'basic', level: 'admin', user_id: '000000000000000000000003'}
});
});
});
@@ -207,17 +207,17 @@ describe('/', () => {
});
});
- describe('A not connected database', () => { // RUN AS LAST OR RECONNECT DATABASE!!
- it('resolves to an 500 error', done => {
- db.disconnect(() => {
- TestHelper.request(server, done, {
- method: 'get',
- url: '/',
- httpStatus: 500
- });
- });
- });
- });
+ // describe('A not connected database', () => { // RUN AS LAST OR RECONNECT DATABASE!!
+ // it('resolves to an 500 error', done => {
+ // db.disconnect(() => {
+ // TestHelper.request(server, done, {
+ // method: 'get',
+ // url: '/',
+ // httpStatus: 500
+ // });
+ // });
+ // });
+ // });
});
describe('The /api/{url} redirect', () => {
@@ -242,15 +242,15 @@ describe('The /api/{url} redirect', () => {
url: '/api/authorized',
auth: {basic: 'admin'},
httpStatus: 200,
- res: {status: 'Authorization successful', method: 'basic', level: 'admin'}
- });
- });
- it('is disabled in production', done => {
- TestHelper.request(server, done, {
- method: 'get',
- url: '/api/authorized',
- auth: {basic: 'admin'},
- httpStatus: 404
+ res: {status: 'Authorization successful', method: 'basic', level: 'admin', user_id: '000000000000000000000003'}
});
});
+ // it('is disabled in production', done => {
+ // TestHelper.request(server, done, {
+ // method: 'get',
+ // url: '/api/authorized',
+ // auth: {basic: 'admin'},
+ // httpStatus: 404
+ // });
+ // });
});
\ No newline at end of file
diff --git a/src/routes/root.ts b/src/routes/root.ts
index 20f10b9..348d660 100644
--- a/src/routes/root.ts
+++ b/src/routes/root.ts
@@ -14,21 +14,33 @@ router.get('/', (req, res) => {
router.get('/authorized', (req, res) => {
if (!req.auth(res, globals.levels)) return;
- res.json({status: 'Authorization successful', method: req.authDetails.method, level: req.authDetails.level});
+ res.json({
+ status: 'Authorization successful',
+ method: req.authDetails.method,
+ level: req.authDetails.level,
+ user_id: req.authDetails.id
+ });
});
-// TODO: evaluate exact changelog functionality (restoring, delting after time, etc.)
+// TODO: evaluate exact changelog functionality (restoring, deleting after time, etc.)
router.get('/changelog/:timestamp/:page?/:pagesize?', (req, res, next) => {
if (!req.auth(res, ['maintain', 'admin'], 'basic')) return;
- const {error, value: options} = RootValidate.changelogParams({timestamp: req.params.timestamp, page: req.params.page, pagesize: req.params.pagesize});
+ const {error, value: options} = RootValidate.changelogParams({
+ timestamp: req.params.timestamp,
+ page: req.params.page,
+ pagesize: req.params.pagesize
+ });
if (error) return res400(error, res);
- const id = new mongoose.Types.ObjectId(Math.floor(new Date(options.timestamp).getTime() / 1000).toString(16) + '0000000000000000');
- ChangelogModel.find({_id: {$lte: id}}).sort({_id: -1}).skip(options.page * options.pagesize).limit(options.pagesize).lean().exec((err, data) => {
+ const id = new mongoose.Types
+ .ObjectId(Math.floor(new Date(options.timestamp).getTime() / 1000).toString(16) + '0000000000000000');
+ ChangelogModel.find({_id: {$lte: id}}).sort({_id: -1}).skip(options.page * options.pagesize).limit(options.pagesize)
+ .lean().exec((err, data) => {
if (err) return next(err);
- res.json(_.compact(data.map(e => RootValidate.changelogOutput(e)))); // validate all and filter null values from validation errors
+ // validate all and filter null values from validation errors
+ res.json(_.compact(data.map(e => RootValidate.changelogOutput(e))));
});
});
diff --git a/src/routes/sample.spec.ts b/src/routes/sample.spec.ts
index d9db97a..c49e462 100644
--- a/src/routes/sample.spec.ts
+++ b/src/routes/sample.spec.ts
@@ -830,7 +830,7 @@ describe('/sample', () => {
url: '/sample/400000000000000000000001',
auth: {basic: 'janedoe'},
httpStatus: 200,
- req: {type: 'part', color: 'signalviolet', batch: '114531', condition: {condition_template: '200000000000000000000003'}, material_id: '100000000000000000000002', notes: {comment: 'Testcomment', sample_references: [{sample_id: '400000000000000000000003', relation: 'part to this sample'}]}}
+ req: {type: 'other', color: 'signalviolet', batch: '114531', condition: {condition_template: '200000000000000000000003'}, material_id: '100000000000000000000002', notes: {comment: 'Testcomment', sample_references: [{sample_id: '400000000000000000000003', relation: 'part to this sample'}]}}
}).end(err => {
if (err) return done (err);
SampleModel.findById('400000000000000000000001').lean().exec((err, data: any) => {
@@ -839,7 +839,7 @@ describe('/sample', () => {
should(data).have.property('_id');
should(data).have.property('number', '1');
should(data).have.property('color', 'signalviolet');
- should(data).have.property('type', 'part');
+ should(data).have.property('type', 'other');
should(data).have.property('batch', '114531');
should(data).have.property('condition', {condition_template: '200000000000000000000003'});
should(data.material_id.toString()).be.eql('100000000000000000000002');
@@ -1054,6 +1054,16 @@ describe('/sample', () => {
res: {status: 'Condition template not available'}
});
});
+ it('rejects a not accepted type', done => {
+ TestHelper.request(server, done, {
+ method: 'put',
+ url: '/sample/400000000000000000000001',
+ auth: {basic: 'janedoe'},
+ httpStatus: 400,
+ req: {type: 'xx'},
+ res: {status: 'Invalid body format', details: '"type" must be one of [granulate, part, tension rod, other]'}
+ });
+ });
it('allows keeping an empty condition empty', done => {
TestHelper.request(server, done, {
method: 'put',
@@ -1937,6 +1947,16 @@ describe('/sample', () => {
res: {status: 'Invalid body format', details: 'Invalid object id'}
});
});
+ it('rejects a not accepted type', done => {
+ TestHelper.request(server, done, {
+ method: 'post',
+ url: '/sample/new',
+ auth: {basic: 'janedoe'},
+ httpStatus: 400,
+ req: {color: 'black', type: 'xx', batch: '1560237365', material_id: '100000000000000000000001', notes: {comment: 'Testcomment'}},
+ res: {status: 'Invalid body format', details: '"type" must be one of [granulate, part, tension rod, other]'}
+ });
+ });
it('rejects an API key', done => {
TestHelper.request(server, done, {
method: 'post',
diff --git a/src/routes/sample.ts b/src/routes/sample.ts
index 10694ac..47e3aa5 100644
--- a/src/routes/sample.ts
+++ b/src/routes/sample.ts
@@ -28,6 +28,7 @@ const router = express.Router();
// TODO: think about filter keys with measurement template versions
+
router.get('/samples', async (req, res, next) => {
if (!req.auth(res, ['read', 'write', 'maintain', 'dev', 'admin'], 'all')) return;
@@ -35,7 +36,8 @@ router.get('/samples', async (req, res, next) => {
if (error) return res400(error, res);
// TODO: find a better place for these
- const sampleKeys = ['_id', 'color', 'number', 'type', 'batch', 'added', 'condition', 'material_id', 'note_id', 'user_id'];
+ const sampleKeys = ['_id', 'color', 'number', 'type', 'batch', 'added', 'condition', 'material_id', 'note_id',
+ 'user_id'];
// evaluate sort parameter from 'color-asc' to ['color', 1]
filters.sort = filters.sort.split('-');
@@ -74,7 +76,8 @@ router.get('/samples', async (req, res, next) => {
}
else {
// start and end of day
- const date = [new Date(addedFilter.values[0]).setHours(0,0,0,0), new Date(addedFilter.values[0]).setHours(23,59,59,999)];
+ const date = [new Date(addedFilter.values[0]).setHours(0,0,0,0),
+ new Date(addedFilter.values[0]).setHours(23,59,59,999)];
if (addedFilter.mode === 'lt') { // lt start
filters.filters.push({mode: 'lt', field: '_id', values: [dateToOId(date[0])]});
}
@@ -88,7 +91,8 @@ router.get('/samples', async (req, res, next) => {
filters.filters.push({mode: 'gte', field: '_id', values: [dateToOId(date[0])]});
}
if (addedFilter.mode === 'ne') {
- filters.filters.push({mode: 'or', field: '_id', values: [{ _id: { '$lt': dateToOId(date[0])}}, { _id: { '$gt': dateToOId(date[1])}}]});
+ filters.filters.push({mode: 'or', field: '_id',
+ values: [{ _id: { '$lt': dateToOId(date[0])}}, { _id: { '$gt': dateToOId(date[1])}}]});
}
}
}
@@ -103,27 +107,31 @@ router.get('/samples', async (req, res, next) => {
if (filters.sort[0].indexOf('measurements.') >= 0) { // sorting with measurements as starting collection
collection = MeasurementModel;
const [,measurementName, measurementParam] = filters.sort[0].split('.');
- const measurementTemplates = await MeasurementTemplateModel.find({name: measurementName}).lean().exec().catch(err => {next(err);});
+ const measurementTemplates = await MeasurementTemplateModel.find({name: measurementName})
+ .lean().exec().catch(err => {next(err);});
if (measurementTemplates instanceof Error) return;
if (!measurementTemplates) {
return res.status(400).json({status: 'Invalid body format', details: filters.sort[0] + ' not found'});
}
let sortStartValue = null;
if (filters['from-id']) { // from-id specified, fetch values for sorting
- const fromSample = await MeasurementModel.findOne({sample_id: mongoose.Types.ObjectId(filters['from-id'])}).lean().exec().catch(err => {next(err);}); // TODO: what if more than one measurement for sample?
+ const fromSample = await MeasurementModel.findOne({sample_id: mongoose.Types.ObjectId(filters['from-id'])})
+ .lean().exec().catch(err => {next(err);}); // TODO: what if more than one measurement for sample?
if (fromSample instanceof Error) return;
if (!fromSample) {
return res.status(400).json({status: 'Invalid body format', details: 'from-id not found'});
}
sortStartValue = fromSample.values[measurementParam];
}
- queryPtr[0].$match.$and.push({measurement_template: {$in: measurementTemplates.map(e => e._id)}}); // find measurements to sort
+ // find measurements to sort
+ queryPtr[0].$match.$and.push({measurement_template: {$in: measurementTemplates.map(e => e._id)}});
if (filters.filters.find(e => e.field === filters.sort[0])) { // sorted measurement should also be filtered
- queryPtr[0].$match.$and.push(...filterQueries(filters.filters.filter(e => e.field === filters.sort[0]).map(e => {e.field = 'values.' + e.field.split('.')[2]; return e; })));
+ queryPtr[0].$match.$and.push(...filterQueries(filters.filters.filter(e => e.field === filters.sort[0])
+ .map(e => {e.field = 'values.' + e.field.split('.')[2]; return e; })));
}
queryPtr.push(
...sortQuery(filters, ['values.' + measurementParam, 'sample_id'], sortStartValue), // sort measurements
- {$replaceRoot: {newRoot: {measurement: '$$ROOT'}}}, // fetch samples and restructure them to fit sample structure
+ {$replaceRoot: {newRoot: {measurement: '$$ROOT'}}}, // fetch samples and restructure them to fit sample structure
{$lookup: {from: 'samples', localField: 'measurement.sample_id', foreignField: '_id', as: 'sample'}},
{$match: statusQuery(filters, 'sample.status')}, // filter out wrong status once samples were added
{$addFields: {['sample.' + measurementName]: '$measurement.values'}}, // more restructuring
@@ -159,43 +167,52 @@ router.get('/samples', async (req, res, next) => {
let materialAdded = false;
if (sortFilterKeys.find(e => /material\./.test(e))) { // add material fields
materialAdded = true;
- materialQuery.push( // add material properties
- {$lookup: {from: 'materials', localField: 'material_id', foreignField: '_id', as: 'material'}}, // TODO: project out unnecessary fields
+ materialQuery.push( // add material properties // TODO: project out unnecessary fields
+ {$lookup: {from: 'materials', localField: 'material_id', foreignField: '_id', as: 'material'}},
{$addFields: {material: {$arrayElemAt: ['$material', 0]}}}
);
- const baseMFilters = sortFilterKeys.filter(e => /material\./.test(e)).filter(e => ['material.supplier', 'material.group', 'material.number'].indexOf(e) < 0);
- addFilterQueries(materialQuery, filters.filters.filter(e => baseMFilters.indexOf(e.field) >= 0)); // base material filters
+ const baseMFilters = sortFilterKeys.filter(e => /material\./.test(e))
+ .filter(e => ['material.supplier', 'material.group', 'material.number'].indexOf(e) < 0);
+ // base material filters
+ addFilterQueries(materialQuery, filters.filters.filter(e => baseMFilters.indexOf(e.field) >= 0));
if (sortFilterKeys.find(e => e === 'material.supplier')) { // add supplier if needed
materialQuery.push(
- {$lookup: { from: 'material_suppliers', localField: 'material.supplier_id', foreignField: '_id', as: 'material.supplier'}},
+ {$lookup: {
+ from: 'material_suppliers', localField: 'material.supplier_id', foreignField: '_id', as: 'material.supplier'}
+ },
{$addFields: {'material.supplier': {$arrayElemAt: ['$material.supplier.name', 0]}}}
);
}
if (sortFilterKeys.find(e => e === 'material.group')) { // add group if needed
materialQuery.push(
- {$lookup: { from: 'material_groups', localField: 'material.group_id', foreignField: '_id', as: 'material.group' }},
+ {$lookup: {
+ from: 'material_groups', localField: 'material.group_id', foreignField: '_id', as: 'material.group' }
+ },
{$addFields: {'material.group': { $arrayElemAt: ['$material.group.name', 0]}}}
);
}
if (sortFilterKeys.find(e => e === 'material.number')) { // add material number if needed
materialQuery.push(
- {$addFields: {'material.number': { $arrayElemAt: ['$material.numbers.number', {$indexOfArray: ['$material.numbers.color', '$color']}]}}}
+ {$addFields: {'material.number': { $arrayElemAt: [
+ '$material.numbers.number', {$indexOfArray: ['$material.numbers.color', '$color']}
+ ]}}}
);
}
- const specialMFilters = sortFilterKeys.filter(e => /material\./.test(e)).filter(e => ['material.supplier', 'material.group', 'material.number'].indexOf(e) >= 0);
- addFilterQueries(materialQuery, filters.filters.filter(e => specialMFilters.indexOf(e.field) >= 0)); // base material filters
+ const specialMFilters = sortFilterKeys.filter(e => /material\./.test(e))
+ .filter(e => ['material.supplier', 'material.group', 'material.number'].indexOf(e) >= 0);
+ // base material filters
+ addFilterQueries(materialQuery, filters.filters.filter(e => specialMFilters.indexOf(e.field) >= 0));
queryPtr.push(...materialQuery);
if (/material\./.test(filters.sort[0])) { // sort by material key
let sortStartValue = null;
if (filters['from-id']) { // from-id specified
- const fromSample = await SampleModel.aggregate([{$match: {_id: mongoose.Types.ObjectId(filters['from-id'])}}, ...materialQuery]).exec().catch(err => {next(err);});
+ const fromSample = await SampleModel.aggregate(
+ [{$match: {_id: mongoose.Types.ObjectId(filters['from-id'])}}, ...materialQuery]
+ ).exec().catch(err => {next(err);});
if (fromSample instanceof Error) return;
if (!fromSample) {
return res.status(400).json({status: 'Invalid body format', details: 'from-id not found'});
}
- console.log(fromSample);
- console.log(filters.sort[0]);
- console.log(fromSample[filters.sort[0]]);
const filterKey = filters.sort[0].split('.');
if (filterKey.length === 2) {
sortStartValue = fromSample[0][filterKey[0]][filterKey[1]];
@@ -208,23 +225,25 @@ router.get('/samples', async (req, res, next) => {
}
}
- const measurementFilterFields = _.uniq(sortFilterKeys.filter(e => /measurements\./.test(e)).map(e => e.split('.')[1])); // filter measurement names and remove duplicates from parameters
+ const measurementFilterFields = _.uniq(sortFilterKeys.filter(e => /measurements\./.test(e))
+ .map(e => e.split('.')[1])); // filter measurement names and remove duplicates from parameters
if (sortFilterKeys.find(e => /measurements\./.test(e))) { // add measurement fields
- const measurementTemplates = await MeasurementTemplateModel.find({name: {$in: measurementFilterFields}}).lean().exec().catch(err => {next(err);});
+ const measurementTemplates = await MeasurementTemplateModel.find({name: {$in: measurementFilterFields}})
+ .lean().exec().catch(err => {next(err);});
if (measurementTemplates instanceof Error) return;
if (measurementTemplates.length < measurementFilterFields.length) {
return res.status(400).json({status: 'Invalid body format', details: 'Measurement key not found'});
}
queryPtr.push({$lookup: {
from: 'measurements', let: {sId: '$_id'},
- pipeline: [{$match: {$expr: {$and: [{$eq: ['$sample_id', '$$sId']}, {$in: ['$measurement_template', measurementTemplates.map(e => mongoose.Types.ObjectId(e._id))]}]}}}],
+ pipeline: [{$match: {$expr: {$and: [
+ {$eq: ['$sample_id', '$$sId']},
+ {$in: ['$measurement_template', measurementTemplates.map(e => mongoose.Types.ObjectId(e._id))]}
+ ]}}}],
as: 'measurements'
}});
measurementTemplates.forEach(template => {
- queryPtr.push({$addFields: {[template.name]: {$let: { // add measurements as property [template.name], if one result, array is reduced to direct values
- vars: {arr: {$filter: {input: '$measurements', cond: {$eq: ['$$this.measurement_template', mongoose.Types.ObjectId(template._id)]}}}},
- in:{$cond: [{$lte: [{$size: '$$arr'}, 1]}, {$arrayElemAt: ['$$arr', 0]}, '$$arr']}
- }}}}, {$addFields: {[template.name]: {$cond: ['$' + template.name + '.values', '$' + template.name + '.values', template.parameters.reduce((s, e) => {s[e.name] = null; return s;}, {})]}}});
+ addMeasurements(queryPtr, template);
});
addFilterQueries(queryPtr, filters.filters
.filter(e => sortFilterKeys.filter(e => /measurements\./.test(e)).indexOf(e.field) >= 0)
@@ -232,14 +251,18 @@ router.get('/samples', async (req, res, next) => {
); // measurement filters
}
- if (!filters.fields.find(e => /spectrum\./.test(e)) && !filters['from-id']) { // count total number of items before $skip and $limit, only works when from-id is not specified and spectra are not included
+ // count total number of items before $skip and $limit, only works when from-id is not specified and spectra are not
+ // included
+ if (!filters.fields.find(e => /spectrum\./.test(e)) && !filters['from-id']) {
queryPtr.push({$facet: {count: [{$count: 'count'}], samples: []}});
queryPtr = queryPtr[queryPtr.length - 1].$facet.samples; // add rest of aggregation pipeline into $facet
}
// paging
if (filters['to-page']) {
- queryPtr.push({$skip: Math.abs(filters['to-page'] + Number(filters['to-page'] < 0)) * filters['page-size'] + Number(filters['to-page'] < 0)}) // number to skip, if going back pages, one page has to be skipped less but on sample more
+ // number to skip; when going back pages, one page fewer has to be skipped but one sample more
+ queryPtr.push({$skip: Math.abs(filters['to-page'] + Number(filters['to-page'] < 0)) * filters['page-size'] +
+ Number(filters['to-page'] < 0)})
}
if (filters['page-size']) {
queryPtr.push({$limit: filters['page-size']});
@@ -265,51 +288,65 @@ router.get('/samples', async (req, res, next) => {
}
if (fieldsToAdd.indexOf('material.supplier') >= 0) { // add supplier if needed
queryPtr.push(
- {$lookup: { from: 'material_suppliers', localField: 'material.supplier_id', foreignField: '_id', as: 'material.supplier'}},
+ {$lookup: {
+ from: 'material_suppliers', localField: 'material.supplier_id', foreignField: '_id', as: 'material.supplier'
+ }},
{$addFields: {'material.supplier': {$arrayElemAt: ['$material.supplier.name', 0]}}}
);
}
if (fieldsToAdd.indexOf('material.group') >= 0) { // add group if needed
queryPtr.push(
- {$lookup: { from: 'material_groups', localField: 'material.group_id', foreignField: '_id', as: 'material.group' }},
+ {$lookup: {
+ from: 'material_groups', localField: 'material.group_id', foreignField: '_id', as: 'material.group'
+ }},
{$addFields: {'material.group': { $arrayElemAt: ['$material.group.name', 0]}}}
);
}
if (fieldsToAdd.indexOf('material.number') >= 0) { // add material number if needed
queryPtr.push(
- {$addFields: {'material.number': { $arrayElemAt: ['$material.numbers.number', {$indexOfArray: ['$material.numbers.color', '$color']}]}}}
+ {$addFields: {'material.number': {
+ $arrayElemAt: ['$material.numbers.number', {$indexOfArray: ['$material.numbers.color', '$color']}]
+ }}}
);
}
- let measurementFieldsFields: string[] = _.uniq(fieldsToAdd.filter(e => /measurements\./.test(e)).map(e => e.split('.')[1])); // filter measurement names and remove duplicates from parameters
+ let measurementFieldsFields: string[] = _.uniq(
+ fieldsToAdd.filter(e => /measurements\./.test(e)).map(e => e.split('.')[1])
+ ); // filter measurement names and remove duplicates from parameters
if (fieldsToAdd.find(e => /measurements\./.test(e))) { // add measurement fields
- const measurementTemplates = await MeasurementTemplateModel.find({name: {$in: measurementFieldsFields}}).lean().exec().catch(err => {next(err);});
+ const measurementTemplates = await MeasurementTemplateModel.find({name: {$in: measurementFieldsFields}})
+ .lean().exec().catch(err => {next(err);});
if (measurementTemplates instanceof Error) return;
if (measurementTemplates.length < measurementFieldsFields.length) {
return res.status(400).json({status: 'Invalid body format', details: 'Measurement key not found'});
}
- if (fieldsToAdd.find(e => /spectrum\./.test(e))) { // use different lookup methods with and without spectrum for the best performance
- queryPtr.push({$lookup: {from: 'measurements', localField: '_id', foreignField: 'sample_id', as: 'measurements'}});
+ // use different lookup methods with and without spectrum for the best performance
+ if (fieldsToAdd.find(e => /spectrum\./.test(e))) {
+ queryPtr.push(
+ {$lookup: {from: 'measurements', localField: '_id', foreignField: 'sample_id', as: 'measurements'}}
+ );
}
else {
queryPtr.push({$lookup: {
from: 'measurements', let: {sId: '$_id'},
- pipeline: [{$match: {$expr: {$and: [{$eq: ['$sample_id', '$$sId']}, {$in: ['$measurement_template', measurementTemplates.map(e => mongoose.Types.ObjectId(e._id))]}]}}}],
+ pipeline: [{$match: {$expr: {$and: [
+ {$eq: ['$sample_id', '$$sId']},
+ {$in: ['$measurement_template', measurementTemplates.map(e => mongoose.Types.ObjectId(e._id))]}
+ ]}}}],
as: 'measurements'
}});
}
measurementTemplates.forEach(template => { // TODO: hard coded dpt for special treatment, change later
- queryPtr.push({$addFields: {[template.name]: {$let: { // add measurements as property [template.name], if one result, array is reduced to direct values
- vars: {arr: {$filter: {input: '$measurements', cond: {$eq: ['$$this.measurement_template', mongoose.Types.ObjectId(template._id)]}}}},
- in:{$cond: [{$lte: [{$size: '$$arr'}, 1]}, {$arrayElemAt: ['$$arr', 0]}, '$$arr']}
- }}}}, {$addFields: {[template.name]: {$cond: ['$' + template.name + '.values', '$' + template.name + '.values', template.parameters.reduce((s, e) => {s[e.name] = null; return s;}, {})]}}});
+ addMeasurements(queryPtr, template);
if (measurementFieldsFields.find(e => e === 'spectrum')) {
queryPtr.push({$unwind: '$spectrum'});
}
});
// if (measurementFieldsFields.find(e => e === 'spectrum')) { // TODO: remove hardcoded as well
// queryPtr.push(
- // {$addFields: {spectrum: {$filter: {input: '$measurements', cond: {$eq: ['$$this.measurement_template', measurementTemplates.filter(e => e.name === 'spectrum')[0]._id]}}}}},
+ // {$addFields: {spectrum: {$filter: {input: '$measurements', cond: {
+ // $eq: ['$$this.measurement_template', measurementTemplates.filter(e => e.name === 'spectrum')[0]._id]
+ // }}}}},
// {$addFields: {spectrum: '$spectrum.values'}},
// {$unwind: '$spectrum'}
// );
@@ -318,10 +355,11 @@ router.get('/samples', async (req, res, next) => {
queryPtr.push({$project: {measurements: 0}});
}
- const projection = filters.fields.map(e => e.replace('measurements.', '')).reduce((s, e) => {s[e] = true; return s; }, {});
- if (filters.fields.indexOf('added') >= 0) { // add added date
+ const projection = filters.fields.map(e => e.replace('measurements.', ''))
+ .reduce((s, e) => {s[e] = true; return s; }, {});
+ if (filters.fields.indexOf('added') >= 0) { // add added date // TODO: upgrade MongoDB version or find alternative
// projection.added = {$toDate: '$_id'};
- // projection.added = { $convert: { input: '$_id', to: "date" } } // TODO: upgrade MongoDB version or find alternative
+ // projection.added = { $convert: { input: '$_id', to: "date" } }
}
if (filters.fields.indexOf('_id') < 0 && filters.fields.indexOf('added') < 0) { // disable _id explicitly
projection._id = false;
@@ -347,7 +385,10 @@ router.get('/samples', async (req, res, next) => {
if (filters['to-page'] < 0) {
data.reverse();
}
- const measurementFields = _.uniq([filters.sort[0].split('.')[1], ...measurementFilterFields, ...measurementFieldsFields]);
+ const measurementFields = _.uniq(
+ [filters.sort[0].split('.')[1],
+ ...measurementFilterFields, ...measurementFieldsFields]
+ );
if (filters.csv) { // output as csv
csv(_.compact(data.map(e => SampleValidate.output(e, 'refs', measurementFields))), (err, data) => {
if (err) return next(err);
@@ -355,8 +396,8 @@ router.get('/samples', async (req, res, next) => {
res.send(data);
});
}
- else {
- res.json(_.compact(data.map(e => SampleValidate.output(e, 'refs', measurementFields)))); // validate all and filter null values from validation errors
+ else { // validate all and filter null values from validation errors
+ res.json(_.compact(data.map(e => SampleValidate.output(e, 'refs', measurementFields))));
}
});
}
@@ -389,7 +430,8 @@ router.get('/samples/:state(new|deleted)', (req, res, next) => {
SampleModel.find({status: globals.status[req.params.state]}).lean().exec((err, data) => {
if (err) return next(err);
- res.json(_.compact(data.map(e => SampleValidate.output(e)))); // validate all and filter null values from validation errors
+ // validate all and filter null values from validation errors
+ res.json(_.compact(data.map(e => SampleValidate.output(e))));
});
});
@@ -405,7 +447,8 @@ router.get('/samples/count', (req, res, next) => {
router.get('/sample/' + IdValidate.parameter(), (req, res, next) => {
if (!req.auth(res, ['read', 'write', 'maintain', 'dev', 'admin'], 'all')) return;
- SampleModel.findById(req.params.id).populate('material_id').populate('user_id', 'name').populate('note_id').exec(async (err, sampleData: any) => {
+ SampleModel.findById(req.params.id).populate('material_id').populate('user_id', 'name').populate('note_id')
+ .exec(async (err, sampleData: any) => {
if (err) return next(err);
await sampleReturn(sampleData, req, res, next);
});
@@ -434,8 +477,11 @@ router.put('/sample/' + IdValidate.parameter(), (req, res, next) => {
else if (sample.hasOwnProperty('color')) {
if (!await materialCheck(sample, res, next, sampleData.material_id)) return;
}
- if (sample.hasOwnProperty('condition') && !(_.isEmpty(sample.condition) && _.isEmpty(sampleData.condition))) { // do not execute check if condition is and was empty
- if (!await conditionCheck(sample.condition, 'change', res, next, sampleData.condition.condition_template.toString() !== sample.condition.condition_template)) return;
+ // do not execute check if condition is and was empty
+ if (sample.hasOwnProperty('condition') && !(_.isEmpty(sample.condition) && _.isEmpty(sampleData.condition))) {
+ if (!await conditionCheck(sample.condition, 'change', res, next,
+ !(sampleData.condition.condition_template &&
+ sampleData.condition.condition_template.toString() === sample.condition.condition_template))) return;
}
if (sample.hasOwnProperty('notes')) {
@@ -443,7 +489,8 @@ router.put('/sample/' + IdValidate.parameter(), (req, res, next) => {
if (sampleData.note_id !== null) { // old notes data exists
const data = await NoteModel.findById(sampleData.note_id).lean().exec().catch(err => {next(err);}) as any;
if (data instanceof Error) return;
- newNotes = !_.isEqual(_.pick(IdValidate.stringify(data), _.keys(sample.notes)), sample.notes); // check if notes were changed
+ // check if notes were changed
+ newNotes = !_.isEqual(_.pick(IdValidate.stringify(data), _.keys(sample.notes)), sample.notes);
if (newNotes) {
if (data.hasOwnProperty('custom_fields')) { // update note_fields
customFieldsChange(Object.keys(data.custom_fields), -1, req);
@@ -456,7 +503,8 @@ router.put('/sample/' + IdValidate.parameter(), (req, res, next) => {
if (_.keys(sample.notes).length > 0 && newNotes) { // save new notes
if (!await sampleRefCheck(sample, res, next)) return;
- if (sample.notes.hasOwnProperty('custom_fields') && Object.keys(sample.notes.custom_fields).length > 0) { // new custom_fields
+ // new custom_fields
+ if (sample.notes.hasOwnProperty('custom_fields') && Object.keys(sample.notes.custom_fields).length > 0) {
customFieldsChange(Object.keys(sample.notes.custom_fields), 1, req);
}
let data = await new NoteModel(sample.notes).save().catch(err => { return next(err)}); // save new notes
@@ -491,11 +539,13 @@ router.delete('/sample/' + IdValidate.parameter(), (req, res, next) => {
// only maintain and admin are allowed to edit other user's data
if (sampleData.user_id.toString() !== req.authDetails.id && !req.auth(res, ['maintain', 'admin'], 'basic')) return;
- await SampleModel.findByIdAndUpdate(req.params.id, {status:globals.status.deleted}).log(req).lean().exec(err => { // set sample status
+ // set sample status
+ await SampleModel.findByIdAndUpdate(req.params.id, {status:globals.status.deleted}).log(req).lean().exec(err => {
if (err) return next(err);
// set status of associated measurements also to deleted
- MeasurementModel.updateMany({sample_id: mongoose.Types.ObjectId(req.params.id)}, {status: -1}).log(req).lean().exec(err => {
+ MeasurementModel.updateMany({sample_id: mongoose.Types.ObjectId(req.params.id)}, {status: -1})
+ .log(req).lean().exec(err => {
if (err) return next(err);
if (sampleData.note_id !== null) { // handle notes
@@ -518,7 +568,8 @@ router.delete('/sample/' + IdValidate.parameter(), (req, res, next) => {
router.get('/sample/number/:number', (req, res, next) => {
if (!req.auth(res, ['read', 'write', 'maintain', 'dev', 'admin'], 'all')) return;
- SampleModel.findOne({number: req.params.number}).populate('material_id').populate('user_id', 'name').populate('note_id').exec(async (err, sampleData: any) => {
+ SampleModel.findOne({number: req.params.number}).populate('material_id').populate('user_id', 'name')
+ .populate('note_id').exec(async (err, sampleData: any) => {
if (err) return next(err);
await sampleReturn(sampleData, req, res, next);
});
@@ -572,13 +623,15 @@ router.post('/sample/new', async (req, res, next) => {
req.body.condition = {};
}
- const {error, value: sample} = SampleValidate.input(req.body, 'new' + (req.authDetails.level === 'admin' ? '-admin' : ''));
+ const {error, value: sample} =
+ SampleValidate.input(req.body, 'new' + (req.authDetails.level === 'admin' ? '-admin' : ''));
if (error) return res400(error, res);
if (!await materialCheck(sample, res, next)) return;
if (!await sampleRefCheck(sample, res, next)) return;
- if (sample.notes.hasOwnProperty('custom_fields') && Object.keys(sample.notes.custom_fields).length > 0) { // new custom_fields
+ // new custom_fields
+ if (sample.notes.hasOwnProperty('custom_fields') && Object.keys(sample.notes.custom_fields).length > 0) {
customFieldsChange(Object.keys(sample.notes.custom_fields), 1, req);
}
@@ -615,21 +668,27 @@ router.get('/sample/notes/fields', (req, res, next) => {
NoteFieldModel.find({}).lean().exec((err, data) => {
if (err) return next(err);
- res.json(_.compact(data.map(e => NoteFieldValidate.output(e)))); // validate all and filter null values from validation errors
+ // validate all and filter null values from validation errors
+ res.json(_.compact(data.map(e => NoteFieldValidate.output(e))));
})
});
module.exports = router;
+// store the highest generated number for each location to avoid duplicate numbers
+const numberBuffer: {[location: string]: number} = {};
-async function numberGenerate (sample, req, res, next) { // generate number in format Location32, returns false on error
+// generate number in format Location32, returns false on error
+async function numberGenerate (sample, req, res, next) {
const sampleData = await SampleModel
.aggregate([
{$match: {number: new RegExp('^' + req.authDetails.location + '[0-9]+$', 'm')}},
- // {$addFields: {number2: {$toDecimal: {$arrayElemAt: [{$split: [{$arrayElemAt: [{$split: ['$number', 'Rng']}, 1]}, '_']}, 0]}}}}, // not working with MongoDb 3.6
+ // {$addFields: {number2: {$toDecimal: {$arrayElemAt: [{$split: [{$arrayElemAt:
+ // [{$split: ['$number', 'Rng']}, 1]}, '_']}, 0]}}}}, // not working with MongoDb 3.6
{$addFields: {sortNumber: {$let: {
- vars: {tmp: {$concat: ['000000000000000000000000000000', {$arrayElemAt: [{$split: [{$arrayElemAt: [{$split: ['$number', 'Rng']}, 1]}, '_']}, 0]}]}},
+ vars: {tmp: {$concat: ['000000000000000000000000000000',
+ {$arrayElemAt: [{$split: [{$arrayElemAt: [{$split: ['$number', 'Rng']}, 1]}, '_']}, 0]}]}},
in: {$substrCP: ['$$tmp', {$subtract: [{$strLenCP: '$$tmp'}, 30]}, {$strLenCP: '$$tmp'}]}
}}}},
{$sort: {sortNumber: -1}},
@@ -638,11 +697,18 @@ async function numberGenerate (sample, req, res, next) { // generate number in
.exec()
.catch(err => next(err));
if (sampleData instanceof Error) return false;
- return req.authDetails.location + (sampleData[0] ? Number(sampleData[0].number.replace(/[^0-9]+/g, '')) + 1 : 1);
+ let number = (sampleData[0] ? Number(sampleData[0].number.replace(/[^0-9]+/g, '')) : 0);
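+ // continue from the buffered number if it is ahead of the numbers stored in the database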
+ if (numberBuffer[req.authDetails.location] && numberBuffer[req.authDetails.location] >= number) {
+ number = numberBuffer[req.authDetails.location];
+ }
+ number ++;
+ numberBuffer[req.authDetails.location] = number;
+ return req.authDetails.location + number;
}
async function numberCheck(sample, res, next) {
- const sampleData = await SampleModel.findOne({number: sample.number}).lean().exec().catch(err => {next(err); return false;});
+ const sampleData = await SampleModel.findOne({number: sample.number})
+ .lean().exec().catch(err => {next(err); return false;});
if (sampleData) { // found entry with sample number
res.status(400).json({status: 'Sample number already taken'});
return false
@@ -650,7 +716,8 @@ async function numberCheck(sample, res, next) {
return true;
}
-async function materialCheck (sample, res, next, id = sample.material_id) { // validate material_id and color, returns false if invalid
+// validate material_id and color, returns false if invalid
+async function materialCheck (sample, res, next, id = sample.material_id) {
const materialData = await MaterialModel.findById(id).lean().exec().catch(err => next(err)) as any;
if (materialData instanceof Error) return false;
if (!materialData) { // could not find material_id
@@ -660,12 +727,14 @@ async function materialCheck (sample, res, next, id = sample.material_id) { //
return true;
}
-async function conditionCheck (condition, param, res, next, checkVersion = true) { // validate treatment template, returns false if invalid, otherwise template data
+// validate treatment template, returns false if invalid, otherwise template data
+async function conditionCheck (condition, param, res, next, checkVersion = true) {
if (!condition.condition_template || !IdValidate.valid(condition.condition_template)) { // template id not found
res.status(400).json({status: 'Condition template not available'});
return false;
}
- const conditionData = await ConditionTemplateModel.findById(condition.condition_template).lean().exec().catch(err => next(err)) as any;
+ const conditionData = await ConditionTemplateModel.findById(condition.condition_template)
+ .lean().exec().catch(err => next(err)) as any;
if (conditionData instanceof Error) return false;
if (!conditionData) { // template not found
res.status(400).json({status: 'Condition template not available'});
@@ -674,7 +743,8 @@ async function conditionCheck (condition, param, res, next, checkVersion = true)
if (checkVersion) {
// get all template versions and check if given is latest
- const conditionVersions = await ConditionTemplateModel.find({first_id: conditionData.first_id}).sort({version: -1}).lean().exec().catch(err => next(err)) as any;
+ const conditionVersions = await ConditionTemplateModel.find({first_id: conditionData.first_id})
+ .sort({version: -1}).lean().exec().catch(err => next(err)) as any;
if (conditionVersions instanceof Error) return false;
if (condition.condition_template !== conditionVersions[0]._id.toString()) { // template not latest
res.status(400).json({status: 'Old template version not allowed'});
@@ -683,14 +753,16 @@ async function conditionCheck (condition, param, res, next, checkVersion = true)
}
// validate parameters
- const {error, value: ignore} = ParametersValidate.input(_.omit(condition, 'condition_template'), conditionData.parameters, param);
+ const {error, value: ignore} =
+ ParametersValidate.input(_.omit(condition, 'condition_template'), conditionData.parameters, param);
if (error) {res400(error, res); return false;}
return conditionData;
}
function sampleRefCheck (sample, res, next) { // validate sample_references, resolves false for invalid reference
return new Promise(resolve => {
- if (sample.notes.hasOwnProperty('sample_references') && sample.notes.sample_references.length > 0) { // there are sample_references
+ // there are sample_references
+ if (sample.notes.hasOwnProperty('sample_references') && sample.notes.sample_references.length > 0) {
let referencesCount = sample.notes.sample_references.length; // count to keep track of running async operations
sample.notes.sample_references.forEach(reference => {
@@ -715,7 +787,8 @@ function sampleRefCheck (sample, res, next) { // validate sample_references, re
function customFieldsChange (fields, amount, req) { // update custom_fields and respective quantities
fields.forEach(field => {
- NoteFieldModel.findOneAndUpdate({name: field}, {$inc: {qty: amount}} as any, {new: true}).log(req).lean().exec((err, data: any) => { // check if field exists
+ NoteFieldModel.findOneAndUpdate({name: field}, {$inc: {qty: amount}} as any, {new: true})
+ .log(req).lean().exec((err, data: any) => { // check if field exists
if (err) return console.error(err);
if (!data) { // new field
new NoteFieldModel({name: field, qty: 1}).save((err, data) => {
@@ -735,11 +808,27 @@ function customFieldsChange (fields, amount, req) { // update custom_fields and
function sortQuery(filters, sortKeys, sortStartValue) { // sortKeys = ['primary key', 'secondary key']
if (filters['from-id']) { // from-id specified
if ((filters['to-page'] === 0 && filters.sort[1] === 1) || (filters.sort[1] * filters['to-page'] > 0)) { // asc
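+      // cursor-style paging (descriptive note): keep documents whose primary sort value is greater than
+      // the start value, or equal to it with the secondary id at or past 'from-id', then sort ascending;
+      // the else branch below mirrors this for descending order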
- return [{$match: {$or: [{[sortKeys[0]]: {$gt: sortStartValue}}, {$and: [{[sortKeys[0]]: sortStartValue}, {[sortKeys[1]]: {$gte: new mongoose.Types.ObjectId(filters['from-id'])}}]}]}},
- {$sort: {[sortKeys[0]]: 1, _id: 1}}];
+ return [
+ {$match: {$or: [
+ {[sortKeys[0]]: {$gt: sortStartValue}},
+ {$and: [
+ {[sortKeys[0]]: sortStartValue},
+ {[sortKeys[1]]: {$gte: new mongoose.Types.ObjectId(filters['from-id'])}}
+ ]}
+ ]}},
+ {$sort: {[sortKeys[0]]: 1, _id: 1}}
+ ];
} else {
- return [{$match: {$or: [{[sortKeys[0]]: {$lt: sortStartValue}}, {$and: [{[sortKeys[0]]: sortStartValue}, {[sortKeys[1]]: {$lte: new mongoose.Types.ObjectId(filters['from-id'])}}]}]}},
- {$sort: {[sortKeys[0]]: -1, _id: -1}}];
+ return [
+ {$match: {$or: [
+ {[sortKeys[0]]: {$lt: sortStartValue}},
+ {$and: [
+ {[sortKeys[0]]: sortStartValue},
+ {[sortKeys[1]]: {$lte: new mongoose.Types.ObjectId(filters['from-id'])}}
+ ]}
+ ]}},
+ {$sort: {[sortKeys[0]]: -1, _id: -1}}
+ ];
}
} else { // sort from beginning
return [{$sort: {[sortKeys[0]]: filters.sort[1], [sortKeys[1]]: filters.sort[1]}}]; // set _id as secondary sort
@@ -775,29 +864,49 @@ function filterQueries (filters) {
return {[e.field]: {['$in']: [new RegExp(e.values[0])]}};
}
else {
- return {[e.field]: {['$' + e.mode]: (e.mode.indexOf('in') >= 0 ? e.values : e.values[0])}}; // add filter criteria as {field: {$mode: value}}, only use first value when mode is not in/nin
+ // add filter criteria as {field: {$mode: value}}, only use first value when mode is not in/nin
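+        // e.g. (illustrative): {field: 'color', mode: 'in', values: ['black']} becomes {color: {$in: ['black']}}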
+ return {[e.field]: {['$' + e.mode]: (e.mode.indexOf('in') >= 0 ? e.values : e.values[0])}};
}
});
}
+// add measurements as property [template.name]; if only one measurement matches, the array is reduced to its direct values
+function addMeasurements(queryPtr, template) {
+ queryPtr.push(
+ {$addFields: {[template.name]: {$let: {vars: {
+ arr: {$filter: {
+ input: '$measurements', cond: {$eq: ['$$this.measurement_template', mongoose.Types.ObjectId(template._id)]}
+ }}},
+ in: {$cond: [{$lte: [{$size: '$$arr'}, 1]}, {$arrayElemAt: ['$$arr', 0]}, '$$arr']}
+ }}}},
+ {$addFields: {[template.name]: {$cond: [
+ '$' + template.name + '.values',
+ '$' + template.name + '.values',
+ template.parameters.reduce((s, e) => {s[e.name] = null; return s;}, {})
+ ]}}}
+ );
+}
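+// usage sketch (illustrative): addMeasurements(query, template) appends two $addFields stages, so each
+// sample gains a [template.name] property holding the matching measurement's values (an array when several
+// measurements match) or, when none match, the template's parameters set to null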
+
function dateToOId (date) { // convert date to ObjectId
return mongoose.Types.ObjectId(Math.floor(date / 1000).toString(16) + '0000000000000000');
}
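+// e.g. (illustrative): dateToOId(new Date('2019-07-16T00:00:00Z')) returns ObjectId('5d2d13800000000000000000'),
+// an id carrying only the timestamp, presumably for comparing against real _ids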
async function sampleReturn (sampleData, req, res, next) {
if (sampleData) {
- console.log(sampleData);
- await sampleData.populate('material_id.group_id').populate('material_id.supplier_id').execPopulate().catch(err => next(err));
+ await sampleData.populate('material_id.group_id').populate('material_id.supplier_id')
+ .execPopulate().catch(err => next(err));
if (sampleData instanceof Error) return;
sampleData = sampleData.toObject();
- if (sampleData.status === globals.status.deleted && !req.auth(res, ['maintain', 'admin'], 'all')) return; // deleted samples only available for maintain/admin
+ // deleted samples only available for maintain/admin
+ if (sampleData.status === globals.status.deleted && !req.auth(res, ['maintain', 'admin'], 'all')) return;
sampleData.material = sampleData.material_id; // map data to right keys
sampleData.material.group = sampleData.material.group_id.name;
sampleData.material.supplier = sampleData.material.supplier_id.name;
sampleData.user = sampleData.user_id.name;
sampleData.notes = sampleData.note_id ? sampleData.note_id : {};
- MeasurementModel.find({sample_id: sampleData._id, status: {$ne: globals.status.deleted}}).lean().exec((err, data) => {
+ MeasurementModel.find({sample_id: sampleData._id, status: {$ne: globals.status.deleted}})
+ .lean().exec((err, data) => {
sampleData.measurements = data;
res.json(SampleValidate.output(sampleData, 'details'));
});
diff --git a/src/routes/template.spec.ts b/src/routes/template.spec.ts
index db924b3..f936c46 100644
--- a/src/routes/template.spec.ts
+++ b/src/routes/template.spec.ts
@@ -4,6 +4,7 @@ import TemplateConditionModel from '../models/condition_template';
import TemplateMeasurementModel from '../models/measurement_template';
import TestHelper from "../test/helper";
+// TODO: method to return only latest template versions -> rework frontend accordingly
describe('/template', () => {
let server;
diff --git a/src/routes/validate/sample.ts b/src/routes/validate/sample.ts
index 19f6b50..d92674d 100644
--- a/src/routes/validate/sample.ts
+++ b/src/routes/validate/sample.ts
@@ -15,7 +15,7 @@ export default class SampleValidate {
.allow(''),
type: Joi.string()
- .max(128),
+ .valid('granulate', 'part', 'tension rod'),
batch: Joi.string()
.max(128)
@@ -116,7 +116,8 @@ export default class SampleValidate {
}
}
- static output (data, param = 'refs+added', additionalParams = []) { // validate output and strip unwanted properties, returns null if not valid
+ // validate output and strip unwanted properties, returns null if not valid
+ static output (data, param = 'refs+added', additionalParams = []) {
if (param === 'refs+added') {
param = 'refs';
data.added = data._id.getTimestamp();
@@ -169,12 +170,16 @@ export default class SampleValidate {
if (filterValidation.error) return filterValidation;
try {
for (let i in data.filters) {
+ // data.filters[i] = JSON.parse(decodeURIComponent(data.filters[i]));
data.filters[i] = JSON.parse(data.filters[i]);
data.filters[i].values = data.filters[i].values.map(e => { // validate filter values
let validator;
let field = data.filters[i].field
if (/material\./.test(field)) { // select right validation model
- validator = MaterialValidate.outputV().append({number: Joi.string().max(128).allow(''), properties: Joi.alternatives().try(Joi.number(), Joi.string().max(128))});
+ validator = MaterialValidate.outputV().append({
+ number: Joi.string().max(128).allow(''),
+ properties: Joi.alternatives().try(Joi.number(), Joi.string().max(128))
+ });
field = field.replace('material.', '').split('.')[0];
}
else if (/measurements\./.test(field)) {
@@ -194,12 +199,12 @@ export default class SampleValidate {
validator = Joi.object(this.sample);
}
const {value, error} = validator.validate({[field]: e});
- if (error) throw error; // reject invalid values // TODO: return exact error description, handle in frontend filters
+ if (error) throw error; // reject invalid values
return value[field];
});
}
}
- catch {
+ catch (err) {
return {error: {details: [{message: 'Invalid JSON string for filter parameter'}]}, value: null}
}
}
@@ -208,13 +213,22 @@ export default class SampleValidate {
'from-id': IdValidate.get(),
'to-page': Joi.number().integer(),
'page-size': Joi.number().integer().min(1),
- sort: Joi.string().pattern(new RegExp('^(' + this.sortKeys.join('|').replace(/\./g, '\\.').replace(/\*/g, '.+') + ')-(asc|desc)$', 'm')).default('_id-asc'),
+ sort: Joi.string().pattern(
+ new RegExp('^(' + this.sortKeys.join('|').replace(/\./g, '\\.').replace(/\*/g, '.+') + ')-(asc|desc)$', 'm')
+ ).default('_id-asc'),
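+      // accepted sort values take the form '<key>-asc' or '<key>-desc', e.g. the default '_id-asc'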
csv: Joi.boolean().default(false),
- fields: Joi.array().items(Joi.string().pattern(new RegExp('^(' + this.fieldKeys.join('|').replace(/\./g, '\\.').replace(/\*/g, '.+') + ')$', 'm'))).default(['_id','number','type','batch','material_id','color','condition','note_id','user_id','added']).messages({'string.pattern.base': 'Invalid field name'}),
+ fields: Joi.array().items(Joi.string().pattern(
+ new RegExp('^(' + this.fieldKeys.join('|').replace(/\./g, '\\.').replace(/\*/g, '.+') + ')$', 'm')
+ )).default(['_id','number','type','batch','material_id','color','condition','note_id','user_id','added'])
+ .messages({'string.pattern.base': 'Invalid field name'}),
filters: Joi.array().items(Joi.object({
mode: Joi.string().valid('eq', 'ne', 'lt', 'lte', 'gt', 'gte', 'in', 'nin', 'stringin'),
- field: Joi.string().pattern(new RegExp('^(' + this.fieldKeys.join('|').replace(/\./g, '\\.').replace(/\*/g, '.+') + ')$', 'm')).messages({'string.pattern.base': 'Invalid filter field name'}),
- values: Joi.array().items(Joi.alternatives().try(Joi.string().max(128), Joi.number(), Joi.boolean(), Joi.date().iso(), Joi.object())).min(1)
+ field: Joi.string().pattern(
+ new RegExp('^(' + this.fieldKeys.join('|').replace(/\./g, '\\.').replace(/\*/g, '.+') + ')$', 'm')
+ ).messages({'string.pattern.base': 'Invalid filter field name'}),
+ values: Joi.array().items(Joi.alternatives().try(
+ Joi.string().max(128), Joi.number(), Joi.boolean(), Joi.date().iso(), Joi.object()
+ )).min(1)
})).default([])
}).with('to-page', 'page-size').validate(data);
}
diff --git a/src/routes/validate/template.ts b/src/routes/validate/template.ts
index aed8f68..4616a70 100644
--- a/src/routes/validate/template.ts
+++ b/src/routes/validate/template.ts
@@ -1,7 +1,7 @@
import Joi from '@hapi/joi';
import IdValidate from './id';
-// TODO: do not allow a . in the name
+// TODO (important): do not allow a '.' in the name
export default class TemplateValidate {
private static template = {
name: Joi.string()
diff --git a/src/test/db.json b/src/test/db.json
index 7930a94..1987846 100644
--- a/src/test/db.json
+++ b/src/test/db.json
@@ -99,7 +99,7 @@
{
"_id": {"$oid":"400000000000000000000007"},
"number": "34",
- "type": "liquid",
+ "type": "other",
"color": "black",
"batch": "",
"condition": {},
diff --git a/src/test/helper.ts b/src/test/helper.ts
index 44085f7..6fe16d8 100644
--- a/src/test/helper.ts
+++ b/src/test/helper.ts
@@ -29,7 +29,10 @@ export default class TestHelper {
}
static beforeEach (server, done) {
- delete require.cache[require.resolve('../index')]; // prevent loading from cache
+ // delete cached server code except models as these are needed in the testing files as well
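+    // note: the path regex assumes Windows-style separators (API\dist\)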
+ Object.keys(require.cache).filter(e => /API\\dist\\(?!(models|db|test))/.test(e)).forEach(key => {
+ delete require.cache[key]; // prevent loading from cache
+ });
server = require('../index');
db.drop(err => { // reset database
if (err) return done(err);
@@ -38,10 +41,13 @@ export default class TestHelper {
return server
}
- static request (server, done, options) { // options in form: {method, url, contentType, auth: {key/basic: 'name' or 'key'/{name, pass}}, httpStatus, req, res, default (set to false if you want to dismiss default .end handling)}
+ // options in form: {method, url, contentType, auth: {key/basic: 'name' or 'key'/{name, pass}}, httpStatus, req, res,
+  // default (set to false to skip the default .end handling)}
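+  // e.g. (illustrative, names and url are placeholders):
+  // {method: 'get', url: '/samples', auth: {key: 'janedoe'}, httpStatus: 200}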
+ static request (server, done, options) {
let st = supertest(server);
if (options.hasOwnProperty('auth') && options.auth.hasOwnProperty('key')) { // resolve API key
- options.url += '?key=' + (this.auth.hasOwnProperty(options.auth.key)? this.auth[options.auth.key].key : options.auth.key);
+ options.url += '?key=' +
+ (this.auth.hasOwnProperty(options.auth.key)? this.auth[options.auth.key].key : options.auth.key);
}
switch (options.method) { // http method
case 'get':
@@ -91,10 +97,12 @@ export default class TestHelper {
done();
});
}
- else if (options.hasOwnProperty('log')) { // check changelog, takes log: {collection, skip, data/(dataAdd, dataIgn)}
+ // check changelog, takes log: {collection, skip, data/(dataAdd, dataIgn)}
+ else if (options.hasOwnProperty('log')) {
return st.end(err => {
if (err) return done (err);
- ChangelogModel.findOne({}).sort({_id: -1}).skip(options.log.skip? options.log.skip : 0).lean().exec((err, data) => { // latest entry
+ ChangelogModel.findOne({}).sort({_id: -1}).skip(options.log.skip? options.log.skip : 0)
+ .lean().exec((err, data) => { // latest entry
if (err) return done(err);
should(data).have.only.keys('_id', 'action', 'collectionName', 'conditions', 'data', 'user_id', '__v');
should(data).have.property('action', options.method.toUpperCase() + ' ' + options.url);