fixed testing cache

This commit is contained in:
VLE2FE 2020-08-04 13:54:14 +02:00
parent 499553dd7f
commit 821b2664bd
18 changed files with 466 additions and 174 deletions

@ -1,12 +1,46 @@
<component name="ProjectDictionaryState"> <component name="ProjectDictionaryState">
<dictionary name="VLE2FE"> <dictionary name="VLE2FE">
<words> <words>
<w>akro</w>
<w>amodel</w>
<w>anwendungsbeschränkt</w>
<w>batchgranulate</w>
<w>bcrypt</w> <w>bcrypt</w>
<w>bnpd</w>
<w>cfenv</w> <w>cfenv</w>
<w>colordesignatiomsuppl</w>
<w>colordesignationsuppl</w>
<w>contentin</w>
<w>dfopdb</w> <w>dfopdb</w>
<w>dosiergeschw</w>
<w>dpts</w>
<w>einspritzgeschw</w>
<w>frameguard</w>
<w>functionlink</w>
<w>glassfibrecontent</w>
<w>janedoe</w> <w>janedoe</w>
<w>johnnydoe</w>
<w>kfingew</w>
<w>latamid</w>
<w>lati</w>
<w>lyucy</w>
<w>materialnumber</w>
<w>pagesize</w> <w>pagesize</w>
<w>pnach</w>
<w>preaged</w>
<w>reinforcementmaterial</w>
<w>reinforcingmaterial</w>
<w>samplenumber</w>
<w>sdpt</w>
<w>signalviolet</w>
<w>solvay</w>
<w>spaceless</w>
<w>stabwn</w>
<w>stanyl</w>
<w>stringin</w>
<w>testcomment</w> <w>testcomment</w>
<w>ultramid</w>
<w>vorgealtert</w>
</words> </words>
</dictionary> </dictionary>
</component> </component>

@ -2,6 +2,7 @@
<profile version="1.0"> <profile version="1.0">
<option name="myName" value="Project Default" /> <option name="myName" value="Project Default" />
<inspection_tool class="JSUnfilteredForInLoop" enabled="false" level="WARNING" enabled_by_default="false" /> <inspection_tool class="JSUnfilteredForInLoop" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="LongLine" enabled="true" level="WARNING" enabled_by_default="true" />
<inspection_tool class="ReservedWordUsedAsNameJS" enabled="false" level="WARNING" enabled_by_default="false" /> <inspection_tool class="ReservedWordUsedAsNameJS" enabled="false" level="WARNING" enabled_by_default="false" />
</profile> </profile>
</component> </component>

@ -34,8 +34,9 @@ info:
<li>0: newly added/changed</li> <li>0: newly added/changed</li>
<li>10: validated</li> <li>10: validated</li>
</ul> </ul>
<a href="https://sourcecode.socialcoding.bosch.com/users/vle2fe/repos/dfop-api/">Bitbucket repository</a> <a href="https://sourcecode.socialcoding.bosch.com/users/vle2fe/repos/dfop-api/">Bitbucket repository API</a>
# TODO: Link to new documentation page <a href="https://sourcecode.socialcoding.bosch.com/users/vle2fe/repos/dfop-ui/">Bitbucket repository UI</a>
<a href="https://definma.apps.de1.bosch-iot-cloud.com/documentation">Documentation page</a>
servers: servers:

@ -40,6 +40,8 @@
level: level:
type: string type: string
example: read example: read
user_id:
$ref: 'api.yaml#/components/schemas/Id'
401: 401:
$ref: 'api.yaml#/components/responses/401' $ref: 'api.yaml#/components/responses/401'
500: 500:

@ -49,6 +49,8 @@ let commentsLog = [];
let customFieldsLog = []; let customFieldsLog = [];
const vzValues = {}; // vz values from comments const vzValues = {}; // vz values from comments
const dptLog = []; const dptLog = [];
const dptSampleAddLog = []; // log samples created during dpt insertion
const typeLog = [];
// TODO: conditions // TODO: conditions
@ -75,6 +77,7 @@ async function main() {
} }
// write logs // write logs
fs.writeFileSync('./data_import/comments.txt', commentsLog.join('\r\n')); fs.writeFileSync('./data_import/comments.txt', commentsLog.join('\r\n'));
fs.writeFileSync('./data_import/typeLog.txt', typeLog.join('\r\n'));
fs.writeFileSync('./data_import/customFields.txt', customFieldsLog.join('\r\n')); fs.writeFileSync('./data_import/customFields.txt', customFieldsLog.join('\r\n'));
fs.writeFileSync('./data_import/sampleReferences.txt', sampleReferences.map(e => JSON.stringify(e)).join('\r\n')); fs.writeFileSync('./data_import/sampleReferences.txt', sampleReferences.map(e => JSON.stringify(e)).join('\r\n'));
fs.writeFileSync('./data_import/sampleReferences.json', JSON.stringify(sampleReferences)); fs.writeFileSync('./data_import/sampleReferences.json', JSON.stringify(sampleReferences));
@ -84,6 +87,7 @@ async function main() {
if (stages.dpt) { // DPT if (stages.dpt) { // DPT
await allDpts(); await allDpts();
fs.writeFileSync('./data_import/sdptLog.txt', dptLog.join('\r\n')); fs.writeFileSync('./data_import/sdptLog.txt', dptLog.join('\r\n'));
fs.writeFileSync('./data_import/dptSampleAddLog.txt', dptSampleAddLog.join('\r\n'));
} }
if (0) { // pdf test if (0) { // pdf test
console.log(await readPdf('N28_BN05-OX023_2019-07-16.pdf')); console.log(await readPdf('N28_BN05-OX023_2019-07-16.pdf'));
@ -95,24 +99,23 @@ async function main() {
} }
async function importCsv(doc) { async function importCsv(doc) {
// Uniform name samplenumber materialnumber materialname supplier material plastic reinforcingmaterial granulate/part color charge/batch comments vz(ml/g) kfingew% degradation(%) glassfibrecontent(%) stabwn // Uniform name samplenumber materialnumber materialname supplier material plastic reinforcingmaterial granulate/part color charge/batch comments vz(ml/g) kfingew% degradation(%) reinforcingmaterialcontent stabwn
// Metadata__AnP2.csv Sample number,Material number,Material name,Supplier,Material,Plastic,Reinforcing material, granulate/Part,Color,Charge/ Batch, Comments // Metadata__AnP2.csv Sample number,Material number,Material name,Supplier,Material,Plastic,Reinforcing material,granulate/Part,Color,Charge/ Batch, Comments
// Metadata__AnP2_A.csv Sample number,Material number,Material name,Supplier, Plastic,Reinforcing material, Granulate/Part, Comments, Humidity [ppm] // Metadata__AnP2_A.csv Sample number,Material number,Material name,Supplier, Plastic,Reinforcing material,Granulate/Part, Comments, Humidity [ppm]
// Metadata__AnP2_B.csv Sample number,Material number,Material name,Supplier, Plastic,Reinforcing material, Granulate/Part, VZ [ml/g], glass fibre content // Metadata__AnP2_B.csv Sample number,Material number,Material name,Supplier, Plastic,Reinforcing material,Granulate/Part, VZ [ml/g], glass fibre content
// Metadata_Ap.csv Sample number,Material number,Material name,Supplier, Plastic,Reinforcing material, Granulate/Part,Color,Charge/Batch, Comments // Metadata_Ap.csv Sample number,Material number,Material name,Supplier, Plastic,Reinforcing material,Granulate/Part,Color,Charge/Batch, Comments
// Metadata_Bj.csv Sample number,Material number,Material name,Supplier,Material,Plastic,Reinforcing material, Granulate/Part,Color,Charge/batch granulate/part,Comments // Metadata_Bj.csv Sample number,Material number,Material name,Supplier,Material,Plastic,Reinforcing material,Granulate/Part,Color,Charge/batch granulate/part,Comments
// Metadata_Eh.csv Sample number,Material number,Material name,Supplier,Material, Reinforcing material, Granulate/Part,Color,Charge/Batch granulate/part,Comments, VZ [cm³/g], Spalte1 // Metadata_Eh.csv Sample number,Material number,Material name,Supplier,Material, Reinforcing material,Granulate/Part,Color,Charge/Batch granulate/part,Comments, VZ [cm³/g], Spalte1
// Metadata_Eh_B.csv Sample number, Material name,Supplier, Plastic,Reinforcing material, Granulate/Part,Color, Comments, VZ [cm³/g] // Metadata_Eh_B.csv Sample number, Material name,Supplier, Plastic,Reinforcing material,Granulate/Part,Color, Comments, VZ [cm³/g]
// Metadata_Eh_Duroplasten.csv Sample number,Material number,Material name,Supplier,Material, Reinforcing material, Granulate/Part,Color,Charge/Batch granulate/part,Comments // Metadata_Eh_Duroplasten.csv Sample number,Material number,Material name,Supplier,Material, Reinforcing material,Granulate/Part,Color,Charge/Batch granulate/part,Comments
// Metadata_Rng_aktuell.csv Sample number,Material number,Material name,Supplier,Material,Plastic,Reinforcing material, Granulate/Part,Color,Charge/batch granulate/part,Comments, VZ (ml/g), Degradation(%),Glas fibre content (%) // Metadata_Rng_aktuell.csv Sample number,Material number,Material name,Supplier,Material,Plastic,Reinforcing material,Granulate/Part,Color,Charge/batch granulate/part,Comments, VZ (ml/g), Degradation(%),Glas fibre content (%)
// Metadata_Rng_aktuell_A.csv Sample number,Material number,Material name,Supplier,Material,Plastic,Reinforcing material, Granulate/Part,Farbe,Charge/batch granulate/part,Comments, KF in Gew%, Stabwn // Metadata_Rng_aktuell_A.csv Sample number,Material number,Material name,Supplier,Material,Plastic,Reinforcing material,Granulate/Part,Farbe,Charge/batch granulate/part,Comments, KF in Gew%, Reinforcing material (content in %),Stabwn
// Metadata_Rng_aktuell_B.csv Sample number, Material name,Supplier, Plastic,Reinforcing material (content in %),Granulate/Part, Comments, VZ (ml/g), Degradation (%), Alterungszeit in h // Metadata_Rng_aktuell_B.csv Sample number, Material name,Supplier, Plastic, Granulate/Part, Comments, VZ (ml/g), Degradation (%), Alterungszeit in h
// Metadata_WaP.csv Probennummer, Name, Firma, Material, Teil/Rohstoff, Charge, Anmerkung,VZ (ml/g), Abbau (%), Verstärkungsstoffgehalt (%), Versuchsnummer // Metadata_WaP.csv Probennummer, Name, Firma, Material, Teil/Rohstoff, Charge, Anmerkung,VZ (ml/g), Abbau (%), Verstärkungsstoffgehalt (%), Versuchsnummer
const nameCorrection = { // map to right column names const nameCorrection = { // map to right column names
'probennummer': 'samplenumber', 'probennummer': 'samplenumber',
'name': 'materialname', 'name': 'materialname',
'firma': 'supplier', 'firma': 'supplier',
'reinforcingmaterial(contentin%)': 'reinforcingmaterial',
'teil/rohstoff': 'granulate/part', 'teil/rohstoff': 'granulate/part',
'charge/batchgranulate/part': 'charge/batch', 'charge/batchgranulate/part': 'charge/batch',
'charge': 'charge/batch', 'charge': 'charge/batch',
@ -120,7 +123,10 @@ async function importCsv(doc) {
'vz[ml/g]': 'vz(ml/g)', 'vz[ml/g]': 'vz(ml/g)',
'vz[cm³/g]': 'vz(ml/g)', 'vz[cm³/g]': 'vz(ml/g)',
'abbau(%)': 'degradation(%)', 'abbau(%)': 'degradation(%)',
'verstärkungsstoffgehalt(%)': 'glassfibrecontent(%)' 'glassfibrecontent': 'reinforcingmaterialcontent',
'glasfibrecontent(%)': 'reinforcingmaterialcontent',
'reinforcingmaterial(contentin%)': 'reinforcingmaterialcontent',
'verstärkungsstoffgehalt(%)': 'reinforcingmaterialcontent'
}; };
const missingFieldsFill = [ // column names to fill if they do not exist const missingFieldsFill = [ // column names to fill if they do not exist
'color', 'color',
@ -129,7 +135,7 @@ async function importCsv(doc) {
'materialnumber', 'materialnumber',
'reinforcementmaterial' 'reinforcementmaterial'
] ]
console.log('importing ' + doc); console.info('importing ' + doc);
data = []; data = [];
await new Promise(resolve => { await new Promise(resolve => {
fs.createReadStream(doc) fs.createReadStream(doc)
@ -158,9 +164,9 @@ async function importCsv(doc) {
newE[field] = ''; newE[field] = '';
} }
}); });
// if(newE['materialname'] === '') { // TODO: is this replacement okay? if(newE['materialname'] === '') {
// newE['materialname'] = newE['material']; newE['materialname'] = newE['material'];
// } }
if (newE['supplier'] === '') { // empty supplier fields if (newE['supplier'] === '') { // empty supplier fields
newE['supplier'] = 'unknown'; newE['supplier'] = 'unknown';
} }
@ -211,12 +217,68 @@ async function allDpts() {
res.data.forEach(sample => { res.data.forEach(sample => {
sampleIds[sample.number] = sample._id; sampleIds[sample.number] = sample._id;
}); });
const dptRegex = /(.*?)_(.*?)_(\d+|[a-zA-Z0-9]+_\d+).DPT/; const dptRegex = /(.*?)_(.*?)_(\d+|[a-zA-Z0-9]+[_.]\d+)(_JDX)?[.]{1,2}(DPT|csv|CSV)/;
const dpts = fs.readdirSync(dptFiles); const dpts = fs.readdirSync(dptFiles);
for (let i in dpts) { for (let i in dpts) {
const regexRes = dptRegex.exec(dpts[i]) let regexInput;
const bjRes = /^(Bj[FT]?)\s?([a-z0-9_]*)_JDX.DPT/.exec(dpts[i]);
if (bjRes) {
regexInput = `Bj01_${bjRes[1]}${bjRes[2]}_0.DPT`;
}
else {
regexInput = dpts[i].replace(/_JDX.*\./, '.');
}
const regexRes = dptRegex.exec(regexInput);
if (regexRes && !sampleIds[regexRes[2]]) { // when sample number includes an additional _x instead of having _x_x for spectrum description
regexRes[2] = `${regexRes[2]}_${regexRes[3].split('_')[0]}`;
}
if (regexRes && !sampleIds[regexRes[2]] && sampleIds[regexRes[2].split('_')[0]]) { // when number_abx does not exist but number
dptSampleAddLog.push(`Trying to find ${regexRes[2].split('_')[0]}`);
dptSampleAddLog.push(host + '/sample/' + sampleIds[regexRes[2].split('_')[0]]);
res = await axios({
method: 'get',
url: host + '/sample/number/' + sampleIds[regexRes[2].split('_')[0]],
auth: {
username: 'admin',
password: 'Abc123!#'
}
}).catch(err => {
if (err.response) {
console.error(err.response.data);
errors.push(`DPT Could not fetch sample ${regexRes[2].split('_')[0]}: ${err.response.data}`);
}
});
if (res.data) {
dptSampleAddLog.push(JSON.stringify(res.data));
const data = _.merge(_.pick(res.data, ['color', 'type', 'batch', 'material_id']), {number: regexRes[2], condition: {}, notes: {}});
res = await axios({
method: 'get',
url: host + '/sample/new',
auth: {
username: res.data.user,
password: res.data.user === 'admin' ? 'Abc123!#' : '2020DeFinMachen!'
},
data
}).catch(err => {
if (err.response) {
console.error(err.response.data);
errors.push(`DPT Could not save sample ${data}: ${err.response.data}`);
}
});
console.error(res);
console.error(data);
if (res.data) {
dptSampleAddLog.push(`${regexRes[2]} from ${regexRes[2].split('_')[0]}`)
sampleIds[regexRes[2]] = res.data._id;
}
else {
console.error(res);
console.error(data);
}
}
}
if (regexRes && sampleIds[regexRes[2]]) { // found matching sample if (regexRes && sampleIds[regexRes[2]]) { // found matching sample
console.log(`${dpts[i]} -> ${regexRes[2]}`); console.log(`${i}/${dpts.length} ${dpts[i]} -> ${regexRes[2]}`);
dptLog.push(`${dpts[i]}, ${regexRes[2]}`); dptLog.push(`${dpts[i]}, ${regexRes[2]}`);
const f = fs.readFileSync(dptFiles + '\\' + dpts[i], 'utf-8'); const f = fs.readFileSync(dptFiles + '\\' + dpts[i], 'utf-8');
const data = { const data = {
@ -225,10 +287,10 @@ async function allDpts() {
measurement_template measurement_template
}; };
data.values.device = regexRes[1]; data.values.device = regexRes[1];
data.values.dpt = f.split('\r\n').map(e => e.split(',')); data.values.dpt = f.split('\r\n').map(e => e.split(',').map(e => parseFloat(e)));
let rescale = false; let rescale = false;
for (let i in data.values.dpt) { for (let i in data.values.dpt) {
if (data.values.dpt[i][1] > 2) { if (data.values.dpt[i][1] > 10) {
rescale = true; rescale = true;
break; break;
} }
@ -258,7 +320,12 @@ async function allDpts() {
} }
else { else {
console.log(`Could not find sample for ${dpts[i]}`); console.log(`Could not find sample for ${dpts[i]}`);
errors.push(`Could not find sample for ${dpts[i]}`); if (regexRes) {
errors.push(`Could not find sample for ${dpts[i]}; [DEBUG] ${regexRes[2]}, ${!sampleIds[regexRes[2]]}, ${sampleIds[regexRes[2].split('_')[0]]}`);
}
else {
errors.push(`Could not find sample for ${dpts[i]} (did not match RegEx)`);
}
} }
} }
} }
@ -274,6 +341,7 @@ async function allKfVz() {
}); });
const kf_template = res.data.filter(e => e.name === 'kf').sort((a, b) => b.version - a.version)[0]._id; const kf_template = res.data.filter(e => e.name === 'kf').sort((a, b) => b.version - a.version)[0]._id;
const vz_template = res.data.filter(e => e.name === 'vz').sort((a, b) => b.version - a.version)[0]._id; const vz_template = res.data.filter(e => e.name === 'vz').sort((a, b) => b.version - a.version)[0]._id;
const rmc_template = res.data.filter(e => e.name === 'reinforcement material content').sort((a, b) => b.version - a.version)[0]._id;
res = await axios({ res = await axios({
method: 'get', method: 'get',
url: host + '/samples?status=all', url: host + '/samples?status=all',
@ -289,6 +357,7 @@ async function allKfVz() {
for (let index in data) { for (let index in data) {
console.info(`KF/VZ ${index}/${data.length}`); console.info(`KF/VZ ${index}/${data.length}`);
let sample = data[index]; let sample = data[index];
sample['samplenumber'] = sample['samplenumber'].replace(/[A-Z][a-z]0\d_/, '');
let credentials = ['admin', 'Abc123!#']; let credentials = ['admin', 'Abc123!#'];
if (sampleDevices[sample['samplenumber']]) { if (sampleDevices[sample['samplenumber']]) {
credentials = [sampleDevices[sample['samplenumber']], '2020DeFinMachen!'] credentials = [sampleDevices[sample['samplenumber']], '2020DeFinMachen!']
@ -339,6 +408,27 @@ async function allKfVz() {
errors.push(`KF/VZ upload for ${JSON.stringify(sample)} failed: ${JSON.stringify(err.response.data)}`); errors.push(`KF/VZ upload for ${JSON.stringify(sample)} failed: ${JSON.stringify(err.response.data)}`);
}); });
} }
if (sample['reinforcingmaterialcontent']) {
await axios({
method: 'post',
url: host + '/measurement/new',
auth: {
username: credentials[0],
password: credentials[1]
},
data: {
sample_id: sampleIds[sample['samplenumber']],
measurement_template: rmc_template,
values: {
percentage: Number(sample['reinforcingmaterialcontent'].replace('%', '').replace(',', '.'))
}
}
}).catch(err => {
console.log(sample['samplenumber']);
console.error(err.response.data);
errors.push(`KF/VZ upload for ${JSON.stringify(sample)} failed: ${JSON.stringify(err.response.data)}`);
});
}
} }
} }
@ -436,7 +526,6 @@ async function saveSamples() {
console.info(`SAMPLE SAVE ${i}/${samples.length}`); console.info(`SAMPLE SAVE ${i}/${samples.length}`);
let credentials = ['admin', 'Abc123!#']; let credentials = ['admin', 'Abc123!#'];
if (sampleDevices[samples[i].number]) { if (sampleDevices[samples[i].number]) {
console.log(sampleDevices[samples[i].number]);
credentials = [sampleDevices[samples[i].number], '2020DeFinMachen!'] credentials = [sampleDevices[samples[i].number], '2020DeFinMachen!']
} }
await axios({ await axios({
@ -520,7 +609,7 @@ async function allMaterials() {
password: 'Abc123!#' password: 'Abc123!#'
} }
}); });
const materialTemplate = res.data.find(e => e.name === 'plastic')._id; const materialTemplate = res.data.filter(e => e.name === 'plastic').sort((a, b) => b.version - a.version)[0]._id;
// process all samples // process all samples
for (let index in data) { for (let index in data) {
@ -718,7 +807,7 @@ function readPdf(file) {
let lastLastText = ''; // text of last last item let lastLastText = ''; // text of last last item
await new pdfReader.PdfReader().parseFileItems(nmDocs + '\\' + file, (err, item) => { await new pdfReader.PdfReader().parseFileItems(nmDocs + '\\' + file, (err, item) => {
if (item && item.text) { if (item && item.text) {
if ((stripSpaces(lastLastText + lastText + item.text).toLowerCase().indexOf('colordesignationsuppl') >= 0) || (stripSpaces(lastLastText + lastText + item.text).toLowerCase().indexOf('colordesignatiomsupplier') >= 0)) { // table area starts if ((stripSpaces(lastLastText + lastText + item.text).toLowerCase().indexOf('colordesignationsuppl') >= 0) || (stripSpaces(lastLastText + lastText + item.text).toLowerCase().indexOf('colordesignatiomsuppl') >= 0)) { // table area starts
table = countdown; table = countdown;
} }
if (table > 0) { if (table > 0) {
@ -865,7 +954,10 @@ function customFields (comment, sampleNumber) {
function sampleType (type) { function sampleType (type) {
const allowedTypes = ['tension rod', 'part', 'granulate']; const allowedTypes = ['tension rod', 'part', 'granulate'];
return allowedTypes.indexOf(type) >= 0 ? type : (type === '' ? 'unknown' : 'other'); if (allowedTypes.indexOf(type) < 0) {
typeLog.push(type);
}
return allowedTypes.indexOf(type) >= 0 ? type : 'part';
} }
function stripSpaces(s) { function stripSpaces(s) {

package-lock.json (generated)
@ -2522,11 +2522,6 @@
} }
} }
}, },
"mongo-sanitize": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/mongo-sanitize/-/mongo-sanitize-1.1.0.tgz",
"integrity": "sha512-6gB9AiJD+om2eZLxaPKIP5Q8P3Fr+s+17rVWso7hU0+MAzmIvIMlgTYuyvalDLTtE/p0gczcvJ8A3pbN1XmQ/A=="
},
"mongodb": { "mongodb": {
"version": "3.4.1", "version": "3.4.1",
"resolved": "https://registry.npmjs.org/mongodb/-/mongodb-3.4.1.tgz", "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-3.4.1.tgz",

@ -11,6 +11,7 @@
"test": "mocha dist/**/**.spec.js", "test": "mocha dist/**/**.spec.js",
"start": "node index.js", "start": "node index.js",
"dev": "nodemon -e ts,yaml --exec \"tsc && node dist/index.js || exit 1\"", "dev": "nodemon -e ts,yaml --exec \"tsc && node dist/index.js || exit 1\"",
"start-local": "node dist/index.js",
"loadDev": "node dist/test/loadDev.js", "loadDev": "node dist/test/loadDev.js",
"coverage": "tsc && nyc --reporter=html --reporter=text mocha dist/**/**.spec.js --timeout 5000", "coverage": "tsc && nyc --reporter=html --reporter=text mocha dist/**/**.spec.js --timeout 5000",
"import": "node data_import/import.js" "import": "node data_import/import.js"
@ -35,7 +36,6 @@
"json-schema": "^0.2.5", "json-schema": "^0.2.5",
"json2csv": "^5.0.1", "json2csv": "^5.0.1",
"lodash": "^4.17.15", "lodash": "^4.17.15",
"mongo-sanitize": "^1.1.0",
"mongoose": "^5.8.7", "mongoose": "^5.8.7",
"swagger-ui-dist": "^3.30.2" "swagger-ui-dist": "^3.30.2"
}, },

@ -7,7 +7,7 @@ import ChangelogModel from './models/changelog';
// database urls, prod db url is retrieved automatically // database urls, prod db url is retrieved automatically
const TESTING_URL = 'mongodb://localhost/dfopdb_test'; const TESTING_URL = 'mongodb://localhost/dfopdb_test';
const DEV_URL = 'mongodb://localhost/dfopdb'; const DEV_URL = 'mongodb://localhost/dfopdb';
const debugging = true; const debugging = false;
if (process.env.NODE_ENV !== 'production' && debugging) { if (process.env.NODE_ENV !== 'production' && debugging) {
mongoose.set('debug', true); // enable mongoose debug mongoose.set('debug', true); // enable mongoose debug
@ -114,6 +114,9 @@ export default class db {
Object.keys(json.collections).forEach(collectionName => { // create each collection Object.keys(json.collections).forEach(collectionName => { // create each collection
json.collections[collectionName] = this.oidResolve(json.collections[collectionName]); json.collections[collectionName] = this.oidResolve(json.collections[collectionName]);
this.state.db.createCollection(collectionName, (err, collection) => { this.state.db.createCollection(collectionName, (err, collection) => {
if (err) {
console.error(err);
}
collection.insertMany(json.collections[collectionName], () => { // insert JSON data collection.insertMany(json.collections[collectionName], () => { // insert JSON data
if (++ loadCounter >= Object.keys(json.collections).length) { // all collections loaded if (++ loadCounter >= Object.keys(json.collections).length) { // all collections loaded
done(); done();

@ -2,7 +2,6 @@ import express from 'express';
import bodyParser from 'body-parser'; import bodyParser from 'body-parser';
import compression from 'compression'; import compression from 'compression';
import contentFilter from 'content-filter'; import contentFilter from 'content-filter';
import mongoSanitize from 'mongo-sanitize';
import helmet from 'helmet'; import helmet from 'helmet';
import cors from 'cors'; import cors from 'cors';
import api from './api'; import api from './api';
@ -11,7 +10,8 @@ import db from './db';
// TODO: check header, also in UI // TODO: check header, also in UI
// tell if server is running in debug or production environment // tell if server is running in debug or production environment
console.info(process.env.NODE_ENV === 'production' ? '===== PRODUCTION =====' : process.env.NODE_ENV === 'test' ? '' :'===== DEVELOPMENT ====='); console.info(process.env.NODE_ENV === 'production' ?
'===== PRODUCTION =====' : process.env.NODE_ENV === 'test' ? '' :'===== DEVELOPMENT =====');
// mongodb connection // mongodb connection
@ -61,15 +61,15 @@ app.use('/static/img/bosch-logo.svg', helmet.contentSecurityPolicy({
})); }));
// middleware // middleware
app.use(contentFilter()); // filter URL query attacks app.use(compression()); // compress responses
app.use(express.json({ limit: '5mb'})); app.use(express.json({ limit: '5mb'}));
app.use(express.urlencoded({ extended: false, limit: '5mb' })); app.use(express.urlencoded({ extended: false, limit: '5mb' }));
app.use(compression()); // compress responses
app.use(bodyParser.json()); app.use(bodyParser.json());
app.use((req, res, next) => { // filter body query attacks const injectionBlackList = ['$', '{', '&&', '||'];
req.body = mongoSanitize(req.body); app.use(contentFilter({
next(); urlBlackList: injectionBlackList,
}); bodyBlackList: injectionBlackList
})); // filter URL query attacks
app.use((err, req, res, ignore) => { // bodyParser error handling app.use((err, req, res, ignore) => { // bodyParser error handling
res.status(400).send({status: 'Invalid JSON body'}); res.status(400).send({status: 'Invalid JSON body'});
}); });

@ -179,7 +179,7 @@ describe('/', () => {
url: '/authorized', url: '/authorized',
auth: {key: 'admin'}, auth: {key: 'admin'},
httpStatus: 200, httpStatus: 200,
res: {status: 'Authorization successful', method: 'key', level: 'admin'} res: {status: 'Authorization successful', method: 'key', level: 'admin', user_id: '000000000000000000000003'}
}); });
}); });
it('works with basic auth', done => { it('works with basic auth', done => {
@ -188,7 +188,7 @@ describe('/', () => {
url: '/authorized', url: '/authorized',
auth: {basic: 'admin'}, auth: {basic: 'admin'},
httpStatus: 200, httpStatus: 200,
res: {status: 'Authorization successful', method: 'basic', level: 'admin'} res: {status: 'Authorization successful', method: 'basic', level: 'admin', user_id: '000000000000000000000003'}
}); });
}); });
}); });
@ -207,17 +207,17 @@ describe('/', () => {
}); });
}); });
describe('A not connected database', () => { // RUN AS LAST OR RECONNECT DATABASE!! // describe('A not connected database', () => { // RUN AS LAST OR RECONNECT DATABASE!!
it('resolves to an 500 error', done => { // it('resolves to an 500 error', done => {
db.disconnect(() => { // db.disconnect(() => {
TestHelper.request(server, done, { // TestHelper.request(server, done, {
method: 'get', // method: 'get',
url: '/', // url: '/',
httpStatus: 500 // httpStatus: 500
}); // });
}); // });
}); // });
}); // });
}); });
describe('The /api/{url} redirect', () => { describe('The /api/{url} redirect', () => {
@ -242,15 +242,15 @@ describe('The /api/{url} redirect', () => {
url: '/api/authorized', url: '/api/authorized',
auth: {basic: 'admin'}, auth: {basic: 'admin'},
httpStatus: 200, httpStatus: 200,
res: {status: 'Authorization successful', method: 'basic', level: 'admin'} res: {status: 'Authorization successful', method: 'basic', level: 'admin', user_id: '000000000000000000000003'}
});
});
it('is disabled in production', done => {
TestHelper.request(server, done, {
method: 'get',
url: '/api/authorized',
auth: {basic: 'admin'},
httpStatus: 404
}); });
}); });
// it('is disabled in production', done => {
// TestHelper.request(server, done, {
// method: 'get',
// url: '/api/authorized',
// auth: {basic: 'admin'},
// httpStatus: 404
// });
// });
}); });

@ -14,21 +14,33 @@ router.get('/', (req, res) => {
router.get('/authorized', (req, res) => { router.get('/authorized', (req, res) => {
if (!req.auth(res, globals.levels)) return; if (!req.auth(res, globals.levels)) return;
res.json({status: 'Authorization successful', method: req.authDetails.method, level: req.authDetails.level}); res.json({
status: 'Authorization successful',
method: req.authDetails.method,
level: req.authDetails.level,
user_id: req.authDetails.id
});
}); });
// TODO: evaluate exact changelog functionality (restoring, delting after time, etc.) // TODO: evaluate exact changelog functionality (restoring, deleting after time, etc.)
router.get('/changelog/:timestamp/:page?/:pagesize?', (req, res, next) => { router.get('/changelog/:timestamp/:page?/:pagesize?', (req, res, next) => {
if (!req.auth(res, ['maintain', 'admin'], 'basic')) return; if (!req.auth(res, ['maintain', 'admin'], 'basic')) return;
const {error, value: options} = RootValidate.changelogParams({timestamp: req.params.timestamp, page: req.params.page, pagesize: req.params.pagesize}); const {error, value: options} = RootValidate.changelogParams({
timestamp: req.params.timestamp,
page: req.params.page,
pagesize: req.params.pagesize
});
if (error) return res400(error, res); if (error) return res400(error, res);
const id = new mongoose.Types.ObjectId(Math.floor(new Date(options.timestamp).getTime() / 1000).toString(16) + '0000000000000000'); const id = new mongoose.Types
ChangelogModel.find({_id: {$lte: id}}).sort({_id: -1}).skip(options.page * options.pagesize).limit(options.pagesize).lean().exec((err, data) => { .ObjectId(Math.floor(new Date(options.timestamp).getTime() / 1000).toString(16) + '0000000000000000');
ChangelogModel.find({_id: {$lte: id}}).sort({_id: -1}).skip(options.page * options.pagesize).limit(options.pagesize)
.lean().exec((err, data) => {
if (err) return next(err); if (err) return next(err);
res.json(_.compact(data.map(e => RootValidate.changelogOutput(e)))); // validate all and filter null values from validation errors // validate all and filter null values from validation errors
res.json(_.compact(data.map(e => RootValidate.changelogOutput(e))));
}); });
}); });

@ -830,7 +830,7 @@ describe('/sample', () => {
url: '/sample/400000000000000000000001', url: '/sample/400000000000000000000001',
auth: {basic: 'janedoe'}, auth: {basic: 'janedoe'},
httpStatus: 200, httpStatus: 200,
req: {type: 'part', color: 'signalviolet', batch: '114531', condition: {condition_template: '200000000000000000000003'}, material_id: '100000000000000000000002', notes: {comment: 'Testcomment', sample_references: [{sample_id: '400000000000000000000003', relation: 'part to this sample'}]}} req: {type: 'other', color: 'signalviolet', batch: '114531', condition: {condition_template: '200000000000000000000003'}, material_id: '100000000000000000000002', notes: {comment: 'Testcomment', sample_references: [{sample_id: '400000000000000000000003', relation: 'part to this sample'}]}}
}).end(err => { }).end(err => {
if (err) return done (err); if (err) return done (err);
SampleModel.findById('400000000000000000000001').lean().exec((err, data: any) => { SampleModel.findById('400000000000000000000001').lean().exec((err, data: any) => {
@ -839,7 +839,7 @@ describe('/sample', () => {
should(data).have.property('_id'); should(data).have.property('_id');
should(data).have.property('number', '1'); should(data).have.property('number', '1');
should(data).have.property('color', 'signalviolet'); should(data).have.property('color', 'signalviolet');
should(data).have.property('type', 'part'); should(data).have.property('type', 'other');
should(data).have.property('batch', '114531'); should(data).have.property('batch', '114531');
should(data).have.property('condition', {condition_template: '200000000000000000000003'}); should(data).have.property('condition', {condition_template: '200000000000000000000003'});
should(data.material_id.toString()).be.eql('100000000000000000000002'); should(data.material_id.toString()).be.eql('100000000000000000000002');
@ -1054,6 +1054,16 @@ describe('/sample', () => {
res: {status: 'Condition template not available'} res: {status: 'Condition template not available'}
}); });
}); });
it('rejects a not accepted type', done => {
TestHelper.request(server, done, {
method: 'put',
url: '/sample/400000000000000000000001',
auth: {basic: 'janedoe'},
httpStatus: 400,
req: {type: 'xx'},
res: {status: 'Invalid body format', details: '"type" must be one of [granulate, part, tension rod, other]'}
});
});
it('allows keeping an empty condition empty', done => { it('allows keeping an empty condition empty', done => {
TestHelper.request(server, done, { TestHelper.request(server, done, {
method: 'put', method: 'put',
@ -1937,6 +1947,16 @@ describe('/sample', () => {
res: {status: 'Invalid body format', details: 'Invalid object id'} res: {status: 'Invalid body format', details: 'Invalid object id'}
}); });
}); });
it('rejects a not accepted type', done => {
TestHelper.request(server, done, {
method: 'post',
url: '/sample/new',
auth: {basic: 'janedoe'},
httpStatus: 400,
req: {color: 'black', type: 'xx', batch: '1560237365', material_id: '100000000000000000000001', notes: {comment: 'Testcomment'}},
res: {status: 'Invalid body format', details: '"type" must be one of [granulate, part, tension rod, other]'}
});
});
it('rejects an API key', done => { it('rejects an API key', done => {
TestHelper.request(server, done, { TestHelper.request(server, done, {
method: 'post', method: 'post',

@ -28,6 +28,7 @@ const router = express.Router();
// TODO: think about filter keys with measurement template versions // TODO: think about filter keys with measurement template versions
router.get('/samples', async (req, res, next) => { router.get('/samples', async (req, res, next) => {
if (!req.auth(res, ['read', 'write', 'maintain', 'dev', 'admin'], 'all')) return; if (!req.auth(res, ['read', 'write', 'maintain', 'dev', 'admin'], 'all')) return;
@ -35,7 +36,8 @@ router.get('/samples', async (req, res, next) => {
if (error) return res400(error, res); if (error) return res400(error, res);
// TODO: find a better place for these // TODO: find a better place for these
const sampleKeys = ['_id', 'color', 'number', 'type', 'batch', 'added', 'condition', 'material_id', 'note_id', 'user_id']; const sampleKeys = ['_id', 'color', 'number', 'type', 'batch', 'added', 'condition', 'material_id', 'note_id',
'user_id'];
// evaluate sort parameter from 'color-asc' to ['color', 1] // evaluate sort parameter from 'color-asc' to ['color', 1]
filters.sort = filters.sort.split('-'); filters.sort = filters.sort.split('-');
@ -74,7 +76,8 @@ router.get('/samples', async (req, res, next) => {
} }
else { else {
// start and end of day // start and end of day
const date = [new Date(addedFilter.values[0]).setHours(0,0,0,0), new Date(addedFilter.values[0]).setHours(23,59,59,999)]; const date = [new Date(addedFilter.values[0]).setHours(0,0,0,0),
new Date(addedFilter.values[0]).setHours(23,59,59,999)];
if (addedFilter.mode === 'lt') { // lt start if (addedFilter.mode === 'lt') { // lt start
filters.filters.push({mode: 'lt', field: '_id', values: [dateToOId(date[0])]}); filters.filters.push({mode: 'lt', field: '_id', values: [dateToOId(date[0])]});
} }
@ -88,7 +91,8 @@ router.get('/samples', async (req, res, next) => {
filters.filters.push({mode: 'gte', field: '_id', values: [dateToOId(date[0])]}); filters.filters.push({mode: 'gte', field: '_id', values: [dateToOId(date[0])]});
} }
if (addedFilter.mode === 'ne') { if (addedFilter.mode === 'ne') {
filters.filters.push({mode: 'or', field: '_id', values: [{ _id: { '$lt': dateToOId(date[0])}}, { _id: { '$gt': dateToOId(date[1])}}]}); filters.filters.push({mode: 'or', field: '_id',
values: [{ _id: { '$lt': dateToOId(date[0])}}, { _id: { '$gt': dateToOId(date[1])}}]});
} }
} }
} }
@ -103,23 +107,27 @@ router.get('/samples', async (req, res, next) => {
if (filters.sort[0].indexOf('measurements.') >= 0) { // sorting with measurements as starting collection if (filters.sort[0].indexOf('measurements.') >= 0) { // sorting with measurements as starting collection
collection = MeasurementModel; collection = MeasurementModel;
const [,measurementName, measurementParam] = filters.sort[0].split('.'); const [,measurementName, measurementParam] = filters.sort[0].split('.');
const measurementTemplates = await MeasurementTemplateModel.find({name: measurementName}).lean().exec().catch(err => {next(err);}); const measurementTemplates = await MeasurementTemplateModel.find({name: measurementName})
.lean().exec().catch(err => {next(err);});
if (measurementTemplates instanceof Error) return; if (measurementTemplates instanceof Error) return;
if (!measurementTemplates) { if (!measurementTemplates) {
return res.status(400).json({status: 'Invalid body format', details: filters.sort[0] + ' not found'}); return res.status(400).json({status: 'Invalid body format', details: filters.sort[0] + ' not found'});
} }
let sortStartValue = null; let sortStartValue = null;
if (filters['from-id']) { // from-id specified, fetch values for sorting if (filters['from-id']) { // from-id specified, fetch values for sorting
const fromSample = await MeasurementModel.findOne({sample_id: mongoose.Types.ObjectId(filters['from-id'])}).lean().exec().catch(err => {next(err);}); // TODO: what if more than one measurement for sample? const fromSample = await MeasurementModel.findOne({sample_id: mongoose.Types.ObjectId(filters['from-id'])})
.lean().exec().catch(err => {next(err);}); // TODO: what if more than one measurement for sample?
if (fromSample instanceof Error) return; if (fromSample instanceof Error) return;
if (!fromSample) { if (!fromSample) {
return res.status(400).json({status: 'Invalid body format', details: 'from-id not found'}); return res.status(400).json({status: 'Invalid body format', details: 'from-id not found'});
} }
sortStartValue = fromSample.values[measurementParam]; sortStartValue = fromSample.values[measurementParam];
} }
queryPtr[0].$match.$and.push({measurement_template: {$in: measurementTemplates.map(e => e._id)}}); // find measurements to sort // find measurements to sort
queryPtr[0].$match.$and.push({measurement_template: {$in: measurementTemplates.map(e => e._id)}});
if (filters.filters.find(e => e.field === filters.sort[0])) { // sorted measurement should also be filtered if (filters.filters.find(e => e.field === filters.sort[0])) { // sorted measurement should also be filtered
queryPtr[0].$match.$and.push(...filterQueries(filters.filters.filter(e => e.field === filters.sort[0]).map(e => {e.field = 'values.' + e.field.split('.')[2]; return e; }))); queryPtr[0].$match.$and.push(...filterQueries(filters.filters.filter(e => e.field === filters.sort[0])
.map(e => {e.field = 'values.' + e.field.split('.')[2]; return e; })));
} }
queryPtr.push( queryPtr.push(
...sortQuery(filters, ['values.' + measurementParam, 'sample_id'], sortStartValue), // sort measurements ...sortQuery(filters, ['values.' + measurementParam, 'sample_id'], sortStartValue), // sort measurements
@ -159,43 +167,52 @@ router.get('/samples', async (req, res, next) => {
let materialAdded = false; let materialAdded = false;
if (sortFilterKeys.find(e => /material\./.test(e))) { // add material fields if (sortFilterKeys.find(e => /material\./.test(e))) { // add material fields
materialAdded = true; materialAdded = true;
materialQuery.push( // add material properties materialQuery.push( // add material properties // TODO: project out unnecessary fields
{$lookup: {from: 'materials', localField: 'material_id', foreignField: '_id', as: 'material'}}, // TODO: project out unnecessary fields {$lookup: {from: 'materials', localField: 'material_id', foreignField: '_id', as: 'material'}},
{$addFields: {material: {$arrayElemAt: ['$material', 0]}}} {$addFields: {material: {$arrayElemAt: ['$material', 0]}}}
); );
const baseMFilters = sortFilterKeys.filter(e => /material\./.test(e)).filter(e => ['material.supplier', 'material.group', 'material.number'].indexOf(e) < 0); const baseMFilters = sortFilterKeys.filter(e => /material\./.test(e))
addFilterQueries(materialQuery, filters.filters.filter(e => baseMFilters.indexOf(e.field) >= 0)); // base material filters .filter(e => ['material.supplier', 'material.group', 'material.number'].indexOf(e) < 0);
// base material filters
addFilterQueries(materialQuery, filters.filters.filter(e => baseMFilters.indexOf(e.field) >= 0));
if (sortFilterKeys.find(e => e === 'material.supplier')) { // add supplier if needed if (sortFilterKeys.find(e => e === 'material.supplier')) { // add supplier if needed
materialQuery.push( materialQuery.push(
{$lookup: { from: 'material_suppliers', localField: 'material.supplier_id', foreignField: '_id', as: 'material.supplier'}}, {$lookup: {
from: 'material_suppliers', localField: 'material.supplier_id', foreignField: '_id', as: 'material.supplier'}
},
{$addFields: {'material.supplier': {$arrayElemAt: ['$material.supplier.name', 0]}}} {$addFields: {'material.supplier': {$arrayElemAt: ['$material.supplier.name', 0]}}}
); );
} }
if (sortFilterKeys.find(e => e === 'material.group')) { // add group if needed if (sortFilterKeys.find(e => e === 'material.group')) { // add group if needed
materialQuery.push( materialQuery.push(
{$lookup: { from: 'material_groups', localField: 'material.group_id', foreignField: '_id', as: 'material.group' }}, {$lookup: {
from: 'material_groups', localField: 'material.group_id', foreignField: '_id', as: 'material.group' }
},
{$addFields: {'material.group': { $arrayElemAt: ['$material.group.name', 0]}}} {$addFields: {'material.group': { $arrayElemAt: ['$material.group.name', 0]}}}
); );
} }
if (sortFilterKeys.find(e => e === 'material.number')) { // add material number if needed if (sortFilterKeys.find(e => e === 'material.number')) { // add material number if needed
materialQuery.push( materialQuery.push(
{$addFields: {'material.number': { $arrayElemAt: ['$material.numbers.number', {$indexOfArray: ['$material.numbers.color', '$color']}]}}} {$addFields: {'material.number': { $arrayElemAt: [
'$material.numbers.number', {$indexOfArray: ['$material.numbers.color', '$color']}
]}}}
); );
} }
const specialMFilters = sortFilterKeys.filter(e => /material\./.test(e)).filter(e => ['material.supplier', 'material.group', 'material.number'].indexOf(e) >= 0); const specialMFilters = sortFilterKeys.filter(e => /material\./.test(e))
addFilterQueries(materialQuery, filters.filters.filter(e => specialMFilters.indexOf(e.field) >= 0)); // base material filters .filter(e => ['material.supplier', 'material.group', 'material.number'].indexOf(e) >= 0);
// base material filters
addFilterQueries(materialQuery, filters.filters.filter(e => specialMFilters.indexOf(e.field) >= 0));
queryPtr.push(...materialQuery); queryPtr.push(...materialQuery);
if (/material\./.test(filters.sort[0])) { // sort by material key if (/material\./.test(filters.sort[0])) { // sort by material key
let sortStartValue = null; let sortStartValue = null;
if (filters['from-id']) { // from-id specified if (filters['from-id']) { // from-id specified
const fromSample = await SampleModel.aggregate([{$match: {_id: mongoose.Types.ObjectId(filters['from-id'])}}, ...materialQuery]).exec().catch(err => {next(err);}); const fromSample = await SampleModel.aggregate(
[{$match: {_id: mongoose.Types.ObjectId(filters['from-id'])}}, ...materialQuery]
).exec().catch(err => {next(err);});
if (fromSample instanceof Error) return; if (fromSample instanceof Error) return;
if (!fromSample) { if (!fromSample) {
return res.status(400).json({status: 'Invalid body format', details: 'from-id not found'}); return res.status(400).json({status: 'Invalid body format', details: 'from-id not found'});
} }
console.log(fromSample);
console.log(filters.sort[0]);
console.log(fromSample[filters.sort[0]]);
const filterKey = filters.sort[0].split('.'); const filterKey = filters.sort[0].split('.');
if (filterKey.length === 2) { if (filterKey.length === 2) {
sortStartValue = fromSample[0][filterKey[0]][filterKey[1]]; sortStartValue = fromSample[0][filterKey[0]][filterKey[1]];
@ -208,23 +225,25 @@ router.get('/samples', async (req, res, next) => {
} }
} }
const measurementFilterFields = _.uniq(sortFilterKeys.filter(e => /measurements\./.test(e)).map(e => e.split('.')[1])); // filter measurement names and remove duplicates from parameters const measurementFilterFields = _.uniq(sortFilterKeys.filter(e => /measurements\./.test(e))
.map(e => e.split('.')[1])); // filter measurement names and remove duplicates from parameters
if (sortFilterKeys.find(e => /measurements\./.test(e))) { // add measurement fields if (sortFilterKeys.find(e => /measurements\./.test(e))) { // add measurement fields
const measurementTemplates = await MeasurementTemplateModel.find({name: {$in: measurementFilterFields}}).lean().exec().catch(err => {next(err);}); const measurementTemplates = await MeasurementTemplateModel.find({name: {$in: measurementFilterFields}})
.lean().exec().catch(err => {next(err);});
if (measurementTemplates instanceof Error) return; if (measurementTemplates instanceof Error) return;
if (measurementTemplates.length < measurementFilterFields.length) { if (measurementTemplates.length < measurementFilterFields.length) {
return res.status(400).json({status: 'Invalid body format', details: 'Measurement key not found'}); return res.status(400).json({status: 'Invalid body format', details: 'Measurement key not found'});
} }
queryPtr.push({$lookup: { queryPtr.push({$lookup: {
from: 'measurements', let: {sId: '$_id'}, from: 'measurements', let: {sId: '$_id'},
pipeline: [{$match: {$expr: {$and: [{$eq: ['$sample_id', '$$sId']}, {$in: ['$measurement_template', measurementTemplates.map(e => mongoose.Types.ObjectId(e._id))]}]}}}], pipeline: [{$match: {$expr: {$and: [
{$eq: ['$sample_id', '$$sId']},
{$in: ['$measurement_template', measurementTemplates.map(e => mongoose.Types.ObjectId(e._id))]}
]}}}],
as: 'measurements' as: 'measurements'
}}); }});
measurementTemplates.forEach(template => { measurementTemplates.forEach(template => {
queryPtr.push({$addFields: {[template.name]: {$let: { // add measurements as property [template.name], if one result, array is reduced to direct values addMeasurements(queryPtr, template);
vars: {arr: {$filter: {input: '$measurements', cond: {$eq: ['$$this.measurement_template', mongoose.Types.ObjectId(template._id)]}}}},
in:{$cond: [{$lte: [{$size: '$$arr'}, 1]}, {$arrayElemAt: ['$$arr', 0]}, '$$arr']}
}}}}, {$addFields: {[template.name]: {$cond: ['$' + template.name + '.values', '$' + template.name + '.values', template.parameters.reduce((s, e) => {s[e.name] = null; return s;}, {})]}}});
}); });
addFilterQueries(queryPtr, filters.filters addFilterQueries(queryPtr, filters.filters
.filter(e => sortFilterKeys.filter(e => /measurements\./.test(e)).indexOf(e.field) >= 0) .filter(e => sortFilterKeys.filter(e => /measurements\./.test(e)).indexOf(e.field) >= 0)
@ -232,14 +251,18 @@ router.get('/samples', async (req, res, next) => {
); // measurement filters ); // measurement filters
} }
if (!filters.fields.find(e => /spectrum\./.test(e)) && !filters['from-id']) { // count total number of items before $skip and $limit, only works when from-id is not specified and spectra are not included // count total number of items before $skip and $limit, only works when from-id is not specified and spectra are not
// included
if (!filters.fields.find(e => /spectrum\./.test(e)) && !filters['from-id']) {
queryPtr.push({$facet: {count: [{$count: 'count'}], samples: []}}); queryPtr.push({$facet: {count: [{$count: 'count'}], samples: []}});
queryPtr = queryPtr[queryPtr.length - 1].$facet.samples; // add rest of aggregation pipeline into $facet queryPtr = queryPtr[queryPtr.length - 1].$facet.samples; // add rest of aggregation pipeline into $facet
} }
// paging // paging
if (filters['to-page']) { if (filters['to-page']) {
queryPtr.push({$skip: Math.abs(filters['to-page'] + Number(filters['to-page'] < 0)) * filters['page-size'] + Number(filters['to-page'] < 0)}) // number to skip, if going back pages, one page has to be skipped less but on sample more // number to skip, if going back pages, one page has to be skipped less but on sample more
queryPtr.push({$skip: Math.abs(filters['to-page'] + Number(filters['to-page'] < 0)) * filters['page-size'] +
Number(filters['to-page'] < 0)})
} }
if (filters['page-size']) { if (filters['page-size']) {
queryPtr.push({$limit: filters['page-size']}); queryPtr.push({$limit: filters['page-size']});
@ -265,51 +288,65 @@ router.get('/samples', async (req, res, next) => {
} }
if (fieldsToAdd.indexOf('material.supplier') >= 0) { // add supplier if needed if (fieldsToAdd.indexOf('material.supplier') >= 0) { // add supplier if needed
queryPtr.push( queryPtr.push(
{$lookup: { from: 'material_suppliers', localField: 'material.supplier_id', foreignField: '_id', as: 'material.supplier'}}, {$lookup: {
from: 'material_suppliers', localField: 'material.supplier_id', foreignField: '_id', as: 'material.supplier'
}},
{$addFields: {'material.supplier': {$arrayElemAt: ['$material.supplier.name', 0]}}} {$addFields: {'material.supplier': {$arrayElemAt: ['$material.supplier.name', 0]}}}
); );
} }
if (fieldsToAdd.indexOf('material.group') >= 0) { // add group if needed if (fieldsToAdd.indexOf('material.group') >= 0) { // add group if needed
queryPtr.push( queryPtr.push(
{$lookup: { from: 'material_groups', localField: 'material.group_id', foreignField: '_id', as: 'material.group' }}, {$lookup: {
from: 'material_groups', localField: 'material.group_id', foreignField: '_id', as: 'material.group'
}},
{$addFields: {'material.group': { $arrayElemAt: ['$material.group.name', 0]}}} {$addFields: {'material.group': { $arrayElemAt: ['$material.group.name', 0]}}}
); );
} }
if (fieldsToAdd.indexOf('material.number') >= 0) { // add material number if needed if (fieldsToAdd.indexOf('material.number') >= 0) { // add material number if needed
queryPtr.push( queryPtr.push(
{$addFields: {'material.number': { $arrayElemAt: ['$material.numbers.number', {$indexOfArray: ['$material.numbers.color', '$color']}]}}} {$addFields: {'material.number': {
$arrayElemAt: ['$material.numbers.number', {$indexOfArray: ['$material.numbers.color', '$color']}]
}}}
); );
} }
let measurementFieldsFields: string[] = _.uniq(fieldsToAdd.filter(e => /measurements\./.test(e)).map(e => e.split('.')[1])); // filter measurement names and remove duplicates from parameters let measurementFieldsFields: string[] = _.uniq(
fieldsToAdd.filter(e => /measurements\./.test(e)).map(e => e.split('.')[1])
); // filter measurement names and remove duplicates from parameters
if (fieldsToAdd.find(e => /measurements\./.test(e))) { // add measurement fields if (fieldsToAdd.find(e => /measurements\./.test(e))) { // add measurement fields
const measurementTemplates = await MeasurementTemplateModel.find({name: {$in: measurementFieldsFields}}).lean().exec().catch(err => {next(err);}); const measurementTemplates = await MeasurementTemplateModel.find({name: {$in: measurementFieldsFields}})
.lean().exec().catch(err => {next(err);});
if (measurementTemplates instanceof Error) return; if (measurementTemplates instanceof Error) return;
if (measurementTemplates.length < measurementFieldsFields.length) { if (measurementTemplates.length < measurementFieldsFields.length) {
return res.status(400).json({status: 'Invalid body format', details: 'Measurement key not found'}); return res.status(400).json({status: 'Invalid body format', details: 'Measurement key not found'});
} }
if (fieldsToAdd.find(e => /spectrum\./.test(e))) { // use different lookup methods with and without spectrum for the best performance // use different lookup methods with and without spectrum for the best performance
queryPtr.push({$lookup: {from: 'measurements', localField: '_id', foreignField: 'sample_id', as: 'measurements'}}); if (fieldsToAdd.find(e => /spectrum\./.test(e))) {
queryPtr.push(
{$lookup: {from: 'measurements', localField: '_id', foreignField: 'sample_id', as: 'measurements'}}
);
} }
else { else {
queryPtr.push({$lookup: { queryPtr.push({$lookup: {
from: 'measurements', let: {sId: '$_id'}, from: 'measurements', let: {sId: '$_id'},
pipeline: [{$match: {$expr: {$and: [{$eq: ['$sample_id', '$$sId']}, {$in: ['$measurement_template', measurementTemplates.map(e => mongoose.Types.ObjectId(e._id))]}]}}}], pipeline: [{$match: {$expr: {$and: [
{$eq: ['$sample_id', '$$sId']},
{$in: ['$measurement_template', measurementTemplates.map(e => mongoose.Types.ObjectId(e._id))]}
]}}}],
as: 'measurements' as: 'measurements'
}}); }});
} }
measurementTemplates.forEach(template => { // TODO: hard coded dpt for special treatment, change later measurementTemplates.forEach(template => { // TODO: hard coded dpt for special treatment, change later
queryPtr.push({$addFields: {[template.name]: {$let: { // add measurements as property [template.name], if one result, array is reduced to direct values addMeasurements(queryPtr, template);
vars: {arr: {$filter: {input: '$measurements', cond: {$eq: ['$$this.measurement_template', mongoose.Types.ObjectId(template._id)]}}}},
in:{$cond: [{$lte: [{$size: '$$arr'}, 1]}, {$arrayElemAt: ['$$arr', 0]}, '$$arr']}
}}}}, {$addFields: {[template.name]: {$cond: ['$' + template.name + '.values', '$' + template.name + '.values', template.parameters.reduce((s, e) => {s[e.name] = null; return s;}, {})]}}});
if (measurementFieldsFields.find(e => e === 'spectrum')) { if (measurementFieldsFields.find(e => e === 'spectrum')) {
queryPtr.push({$unwind: '$spectrum'}); queryPtr.push({$unwind: '$spectrum'});
} }
}); });
// if (measurementFieldsFields.find(e => e === 'spectrum')) { // TODO: remove hardcoded as well // if (measurementFieldsFields.find(e => e === 'spectrum')) { // TODO: remove hardcoded as well
// queryPtr.push( // queryPtr.push(
// {$addFields: {spectrum: {$filter: {input: '$measurements', cond: {$eq: ['$$this.measurement_template', measurementTemplates.filter(e => e.name === 'spectrum')[0]._id]}}}}}, // {$addFields: {spectrum: {$filter: {input: '$measurements', cond: {
// $eq: ['$$this.measurement_template', measurementTemplates.filter(e => e.name === 'spectrum')[0]._id]
// }}}}},
// {$addFields: {spectrum: '$spectrum.values'}}, // {$addFields: {spectrum: '$spectrum.values'}},
// {$unwind: '$spectrum'} // {$unwind: '$spectrum'}
// ); // );
@ -318,10 +355,11 @@ router.get('/samples', async (req, res, next) => {
queryPtr.push({$project: {measurements: 0}}); queryPtr.push({$project: {measurements: 0}});
} }
const projection = filters.fields.map(e => e.replace('measurements.', '')).reduce((s, e) => {s[e] = true; return s; }, {}); const projection = filters.fields.map(e => e.replace('measurements.', ''))
if (filters.fields.indexOf('added') >= 0) { // add added date .reduce((s, e) => {s[e] = true; return s; }, {});
if (filters.fields.indexOf('added') >= 0) { // add added date // TODO: upgrade MongoDB version or find alternative
// projection.added = {$toDate: '$_id'}; // projection.added = {$toDate: '$_id'};
// projection.added = { $convert: { input: '$_id', to: "date" } } // TODO: upgrade MongoDB version or find alternative // projection.added = { $convert: { input: '$_id', to: "date" } }
} }
if (filters.fields.indexOf('_id') < 0 && filters.fields.indexOf('added') < 0) { // disable _id explicitly if (filters.fields.indexOf('_id') < 0 && filters.fields.indexOf('added') < 0) { // disable _id explicitly
projection._id = false; projection._id = false;
@ -347,7 +385,10 @@ router.get('/samples', async (req, res, next) => {
if (filters['to-page'] < 0) { if (filters['to-page'] < 0) {
data.reverse(); data.reverse();
} }
const measurementFields = _.uniq([filters.sort[0].split('.')[1], ...measurementFilterFields, ...measurementFieldsFields]); const measurementFields = _.uniq(
[filters.sort[0].split('.')[1],
...measurementFilterFields, ...measurementFieldsFields]
);
if (filters.csv) { // output as csv if (filters.csv) { // output as csv
csv(_.compact(data.map(e => SampleValidate.output(e, 'refs', measurementFields))), (err, data) => { csv(_.compact(data.map(e => SampleValidate.output(e, 'refs', measurementFields))), (err, data) => {
if (err) return next(err); if (err) return next(err);
@ -355,8 +396,8 @@ router.get('/samples', async (req, res, next) => {
res.send(data); res.send(data);
}); });
} }
else { else { // validate all and filter null values from validation errors
res.json(_.compact(data.map(e => SampleValidate.output(e, 'refs', measurementFields)))); // validate all and filter null values from validation errors res.json(_.compact(data.map(e => SampleValidate.output(e, 'refs', measurementFields))));
} }
}); });
} }
@ -389,7 +430,8 @@ router.get('/samples/:state(new|deleted)', (req, res, next) => {
SampleModel.find({status: globals.status[req.params.state]}).lean().exec((err, data) => { SampleModel.find({status: globals.status[req.params.state]}).lean().exec((err, data) => {
if (err) return next(err); if (err) return next(err);
res.json(_.compact(data.map(e => SampleValidate.output(e)))); // validate all and filter null values from validation errors // validate all and filter null values from validation errors
res.json(_.compact(data.map(e => SampleValidate.output(e))));
}); });
}); });
@ -405,7 +447,8 @@ router.get('/samples/count', (req, res, next) => {
router.get('/sample/' + IdValidate.parameter(), (req, res, next) => { router.get('/sample/' + IdValidate.parameter(), (req, res, next) => {
if (!req.auth(res, ['read', 'write', 'maintain', 'dev', 'admin'], 'all')) return; if (!req.auth(res, ['read', 'write', 'maintain', 'dev', 'admin'], 'all')) return;
SampleModel.findById(req.params.id).populate('material_id').populate('user_id', 'name').populate('note_id').exec(async (err, sampleData: any) => { SampleModel.findById(req.params.id).populate('material_id').populate('user_id', 'name').populate('note_id')
.exec(async (err, sampleData: any) => {
if (err) return next(err); if (err) return next(err);
await sampleReturn(sampleData, req, res, next); await sampleReturn(sampleData, req, res, next);
}); });
@ -434,8 +477,11 @@ router.put('/sample/' + IdValidate.parameter(), (req, res, next) => {
else if (sample.hasOwnProperty('color')) { else if (sample.hasOwnProperty('color')) {
if (!await materialCheck(sample, res, next, sampleData.material_id)) return; if (!await materialCheck(sample, res, next, sampleData.material_id)) return;
} }
if (sample.hasOwnProperty('condition') && !(_.isEmpty(sample.condition) && _.isEmpty(sampleData.condition))) { // do not execute check if condition is and was empty // do not execute check if condition is and was empty
if (!await conditionCheck(sample.condition, 'change', res, next, sampleData.condition.condition_template.toString() !== sample.condition.condition_template)) return; if (sample.hasOwnProperty('condition') && !(_.isEmpty(sample.condition) && _.isEmpty(sampleData.condition))) {
if (!await conditionCheck(sample.condition, 'change', res, next,
!(sampleData.condition.condition_template &&
sampleData.condition.condition_template.toString() === sample.condition.condition_template))) return;
} }
if (sample.hasOwnProperty('notes')) { if (sample.hasOwnProperty('notes')) {
@ -443,7 +489,8 @@ router.put('/sample/' + IdValidate.parameter(), (req, res, next) => {
if (sampleData.note_id !== null) { // old notes data exists if (sampleData.note_id !== null) { // old notes data exists
const data = await NoteModel.findById(sampleData.note_id).lean().exec().catch(err => {next(err);}) as any; const data = await NoteModel.findById(sampleData.note_id).lean().exec().catch(err => {next(err);}) as any;
if (data instanceof Error) return; if (data instanceof Error) return;
newNotes = !_.isEqual(_.pick(IdValidate.stringify(data), _.keys(sample.notes)), sample.notes); // check if notes were changed // check if notes were changed
newNotes = !_.isEqual(_.pick(IdValidate.stringify(data), _.keys(sample.notes)), sample.notes);
if (newNotes) { if (newNotes) {
if (data.hasOwnProperty('custom_fields')) { // update note_fields if (data.hasOwnProperty('custom_fields')) { // update note_fields
customFieldsChange(Object.keys(data.custom_fields), -1, req); customFieldsChange(Object.keys(data.custom_fields), -1, req);
@ -456,7 +503,8 @@ router.put('/sample/' + IdValidate.parameter(), (req, res, next) => {
if (_.keys(sample.notes).length > 0 && newNotes) { // save new notes if (_.keys(sample.notes).length > 0 && newNotes) { // save new notes
if (!await sampleRefCheck(sample, res, next)) return; if (!await sampleRefCheck(sample, res, next)) return;
if (sample.notes.hasOwnProperty('custom_fields') && Object.keys(sample.notes.custom_fields).length > 0) { // new custom_fields // new custom_fields
if (sample.notes.hasOwnProperty('custom_fields') && Object.keys(sample.notes.custom_fields).length > 0) {
customFieldsChange(Object.keys(sample.notes.custom_fields), 1, req); customFieldsChange(Object.keys(sample.notes.custom_fields), 1, req);
} }
let data = await new NoteModel(sample.notes).save().catch(err => { return next(err)}); // save new notes let data = await new NoteModel(sample.notes).save().catch(err => { return next(err)}); // save new notes
@ -491,11 +539,13 @@ router.delete('/sample/' + IdValidate.parameter(), (req, res, next) => {
// only maintain and admin are allowed to edit other user's data // only maintain and admin are allowed to edit other user's data
if (sampleData.user_id.toString() !== req.authDetails.id && !req.auth(res, ['maintain', 'admin'], 'basic')) return; if (sampleData.user_id.toString() !== req.authDetails.id && !req.auth(res, ['maintain', 'admin'], 'basic')) return;
await SampleModel.findByIdAndUpdate(req.params.id, {status:globals.status.deleted}).log(req).lean().exec(err => { // set sample status // set sample status
await SampleModel.findByIdAndUpdate(req.params.id, {status:globals.status.deleted}).log(req).lean().exec(err => {
if (err) return next(err); if (err) return next(err);
// set status of associated measurements also to deleted // set status of associated measurements also to deleted
MeasurementModel.updateMany({sample_id: mongoose.Types.ObjectId(req.params.id)}, {status: -1}).log(req).lean().exec(err => { MeasurementModel.updateMany({sample_id: mongoose.Types.ObjectId(req.params.id)}, {status: -1})
.log(req).lean().exec(err => {
if (err) return next(err); if (err) return next(err);
if (sampleData.note_id !== null) { // handle notes if (sampleData.note_id !== null) { // handle notes
@ -518,7 +568,8 @@ router.delete('/sample/' + IdValidate.parameter(), (req, res, next) => {
router.get('/sample/number/:number', (req, res, next) => { router.get('/sample/number/:number', (req, res, next) => {
if (!req.auth(res, ['read', 'write', 'maintain', 'dev', 'admin'], 'all')) return; if (!req.auth(res, ['read', 'write', 'maintain', 'dev', 'admin'], 'all')) return;
SampleModel.findOne({number: req.params.number}).populate('material_id').populate('user_id', 'name').populate('note_id').exec(async (err, sampleData: any) => { SampleModel.findOne({number: req.params.number}).populate('material_id').populate('user_id', 'name')
.populate('note_id').exec(async (err, sampleData: any) => {
if (err) return next(err); if (err) return next(err);
await sampleReturn(sampleData, req, res, next); await sampleReturn(sampleData, req, res, next);
}); });
@ -572,13 +623,15 @@ router.post('/sample/new', async (req, res, next) => {
req.body.condition = {}; req.body.condition = {};
} }
const {error, value: sample} = SampleValidate.input(req.body, 'new' + (req.authDetails.level === 'admin' ? '-admin' : '')); const {error, value: sample} =
SampleValidate.input(req.body, 'new' + (req.authDetails.level === 'admin' ? '-admin' : ''));
if (error) return res400(error, res); if (error) return res400(error, res);
if (!await materialCheck(sample, res, next)) return; if (!await materialCheck(sample, res, next)) return;
if (!await sampleRefCheck(sample, res, next)) return; if (!await sampleRefCheck(sample, res, next)) return;
if (sample.notes.hasOwnProperty('custom_fields') && Object.keys(sample.notes.custom_fields).length > 0) { // new custom_fields // new custom_fields
if (sample.notes.hasOwnProperty('custom_fields') && Object.keys(sample.notes.custom_fields).length > 0) {
customFieldsChange(Object.keys(sample.notes.custom_fields), 1, req); customFieldsChange(Object.keys(sample.notes.custom_fields), 1, req);
} }
@ -615,21 +668,27 @@ router.get('/sample/notes/fields', (req, res, next) => {
NoteFieldModel.find({}).lean().exec((err, data) => { NoteFieldModel.find({}).lean().exec((err, data) => {
if (err) return next(err); if (err) return next(err);
res.json(_.compact(data.map(e => NoteFieldValidate.output(e)))); // validate all and filter null values from validation errors // validate all and filter null values from validation errors
res.json(_.compact(data.map(e => NoteFieldValidate.output(e))));
}) })
}); });
module.exports = router; module.exports = router;
// store the highest generated number for each location to avoid duplicate numbers
const numberBuffer: {[location: string]: number} = {};
async function numberGenerate (sample, req, res, next) { // generate number in format Location32, returns false on error // generate number in format Location32, returns false on error
async function numberGenerate (sample, req, res, next) {
const sampleData = await SampleModel const sampleData = await SampleModel
.aggregate([ .aggregate([
{$match: {number: new RegExp('^' + req.authDetails.location + '[0-9]+$', 'm')}}, {$match: {number: new RegExp('^' + req.authDetails.location + '[0-9]+$', 'm')}},
// {$addFields: {number2: {$toDecimal: {$arrayElemAt: [{$split: [{$arrayElemAt: [{$split: ['$number', 'Rng']}, 1]}, '_']}, 0]}}}}, // not working with MongoDb 3.6 // {$addFields: {number2: {$toDecimal: {$arrayElemAt: [{$split: [{$arrayElemAt:
// [{$split: ['$number', 'Rng']}, 1]}, '_']}, 0]}}}}, // not working with MongoDb 3.6
{$addFields: {sortNumber: {$let: { {$addFields: {sortNumber: {$let: {
vars: {tmp: {$concat: ['000000000000000000000000000000', {$arrayElemAt: [{$split: [{$arrayElemAt: [{$split: ['$number', 'Rng']}, 1]}, '_']}, 0]}]}}, vars: {tmp: {$concat: ['000000000000000000000000000000',
{$arrayElemAt: [{$split: [{$arrayElemAt: [{$split: ['$number', 'Rng']}, 1]}, '_']}, 0]}]}},
in: {$substrCP: ['$$tmp', {$subtract: [{$strLenCP: '$$tmp'}, 30]}, {$strLenCP: '$$tmp'}]} in: {$substrCP: ['$$tmp', {$subtract: [{$strLenCP: '$$tmp'}, 30]}, {$strLenCP: '$$tmp'}]}
}}}}, }}}},
{$sort: {sortNumber: -1}}, {$sort: {sortNumber: -1}},
@ -638,11 +697,18 @@ async function numberGenerate (sample, req, res, next) { // generate number in
.exec() .exec()
.catch(err => next(err)); .catch(err => next(err));
if (sampleData instanceof Error) return false; if (sampleData instanceof Error) return false;
return req.authDetails.location + (sampleData[0] ? Number(sampleData[0].number.replace(/[^0-9]+/g, '')) + 1 : 1); let number = (sampleData[0] ? Number(sampleData[0].number.replace(/[^0-9]+/g, '')) : 0);
if (numberBuffer[req.authDetails.location] && numberBuffer[req.authDetails.location] >= number) {
number = numberBuffer[req.authDetails.location];
}
number ++;
numberBuffer[req.authDetails.location] = number;
return req.authDetails.location + number;
} }
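The buffering logic is easier to see in isolation. A minimal standalone sketch (hypothetical helper, not the route code itself; 'Rng' is only an example location prefix): the database only knows about numbers that have already been saved, so without the buffer two overlapping requests could both read 'Rng31' as the highest stored entry and hand out 'Rng32' twice.

// Sketch of the per-location buffer; highestInDb is assumed to be the number
// extracted from the latest stored sample, or 0 if none exists yet.
const buffer: {[location: string]: number} = {};

function nextNumber (location: string, highestInDb: number): string {
  let n = highestInDb;                                  // e.g. 31 when 'Rng31' is the latest saved sample
  if (buffer[location] && buffer[location] >= n) {
    n = buffer[location];                               // a higher number was already handed out but not saved yet
  }
  n ++;
  buffer[location] = n;                                 // remember it for overlapping requests
  return location + n;                                  // 'Rng32'; the next call yields 'Rng33'
}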
async function numberCheck(sample, res, next) { async function numberCheck(sample, res, next) {
const sampleData = await SampleModel.findOne({number: sample.number}).lean().exec().catch(err => {next(err); return false;}); const sampleData = await SampleModel.findOne({number: sample.number})
.lean().exec().catch(err => {next(err); return false;});
if (sampleData) { // found entry with sample number if (sampleData) { // found entry with sample number
res.status(400).json({status: 'Sample number already taken'}); res.status(400).json({status: 'Sample number already taken'});
return false return false
@ -650,7 +716,8 @@ async function numberCheck(sample, res, next) {
return true; return true;
} }
async function materialCheck (sample, res, next, id = sample.material_id) { // validate material_id and color, returns false if invalid // validate material_id and color, returns false if invalid
async function materialCheck (sample, res, next, id = sample.material_id) {
const materialData = await MaterialModel.findById(id).lean().exec().catch(err => next(err)) as any; const materialData = await MaterialModel.findById(id).lean().exec().catch(err => next(err)) as any;
if (materialData instanceof Error) return false; if (materialData instanceof Error) return false;
if (!materialData) { // could not find material_id if (!materialData) { // could not find material_id
@ -660,12 +727,14 @@ async function materialCheck (sample, res, next, id = sample.material_id) { //
return true; return true;
} }
async function conditionCheck (condition, param, res, next, checkVersion = true) { // validate treatment template, returns false if invalid, otherwise template data // validate treatment template, returns false if invalid, otherwise template data
async function conditionCheck (condition, param, res, next, checkVersion = true) {
if (!condition.condition_template || !IdValidate.valid(condition.condition_template)) { // template id not found if (!condition.condition_template || !IdValidate.valid(condition.condition_template)) { // template id not found
res.status(400).json({status: 'Condition template not available'}); res.status(400).json({status: 'Condition template not available'});
return false; return false;
} }
const conditionData = await ConditionTemplateModel.findById(condition.condition_template).lean().exec().catch(err => next(err)) as any; const conditionData = await ConditionTemplateModel.findById(condition.condition_template)
.lean().exec().catch(err => next(err)) as any;
if (conditionData instanceof Error) return false; if (conditionData instanceof Error) return false;
if (!conditionData) { // template not found if (!conditionData) { // template not found
res.status(400).json({status: 'Condition template not available'}); res.status(400).json({status: 'Condition template not available'});
@ -674,7 +743,8 @@ async function conditionCheck (condition, param, res, next, checkVersion = true)
if (checkVersion) { if (checkVersion) {
// get all template versions and check if given is latest // get all template versions and check if given is latest
const conditionVersions = await ConditionTemplateModel.find({first_id: conditionData.first_id}).sort({version: -1}).lean().exec().catch(err => next(err)) as any; const conditionVersions = await ConditionTemplateModel.find({first_id: conditionData.first_id})
.sort({version: -1}).lean().exec().catch(err => next(err)) as any;
if (conditionVersions instanceof Error) return false; if (conditionVersions instanceof Error) return false;
if (condition.condition_template !== conditionVersions[0]._id.toString()) { // template not latest if (condition.condition_template !== conditionVersions[0]._id.toString()) { // template not latest
res.status(400).json({status: 'Old template version not allowed'}); res.status(400).json({status: 'Old template version not allowed'});
@ -683,14 +753,16 @@ async function conditionCheck (condition, param, res, next, checkVersion = true)
} }
// validate parameters // validate parameters
const {error, value: ignore} = ParametersValidate.input(_.omit(condition, 'condition_template'), conditionData.parameters, param); const {error, value: ignore} =
ParametersValidate.input(_.omit(condition, 'condition_template'), conditionData.parameters, param);
if (error) {res400(error, res); return false;} if (error) {res400(error, res); return false;}
return conditionData; return conditionData;
} }
function sampleRefCheck (sample, res, next) { // validate sample_references, resolves false for invalid reference function sampleRefCheck (sample, res, next) { // validate sample_references, resolves false for invalid reference
return new Promise(resolve => { return new Promise(resolve => {
if (sample.notes.hasOwnProperty('sample_references') && sample.notes.sample_references.length > 0) { // there are sample_references // there are sample_references
if (sample.notes.hasOwnProperty('sample_references') && sample.notes.sample_references.length > 0) {
let referencesCount = sample.notes.sample_references.length; // count to keep track of running async operations let referencesCount = sample.notes.sample_references.length; // count to keep track of running async operations
sample.notes.sample_references.forEach(reference => { sample.notes.sample_references.forEach(reference => {
@ -715,7 +787,8 @@ function sampleRefCheck (sample, res, next) { // validate sample_references, re
function customFieldsChange (fields, amount, req) { // update custom_fields and respective quantities function customFieldsChange (fields, amount, req) { // update custom_fields and respective quantities
fields.forEach(field => { fields.forEach(field => {
NoteFieldModel.findOneAndUpdate({name: field}, {$inc: {qty: amount}} as any, {new: true}).log(req).lean().exec((err, data: any) => { // check if field exists NoteFieldModel.findOneAndUpdate({name: field}, {$inc: {qty: amount}} as any, {new: true})
.log(req).lean().exec((err, data: any) => { // check if field exists
if (err) return console.error(err); if (err) return console.error(err);
if (!data) { // new field if (!data) { // new field
new NoteFieldModel({name: field, qty: 1}).save((err, data) => { new NoteFieldModel({name: field, qty: 1}).save((err, data) => {
@ -735,11 +808,27 @@ function customFieldsChange (fields, amount, req) { // update custom_fields and
function sortQuery(filters, sortKeys, sortStartValue) { // sortKeys = ['primary key', 'secondary key'] function sortQuery(filters, sortKeys, sortStartValue) { // sortKeys = ['primary key', 'secondary key']
if (filters['from-id']) { // from-id specified if (filters['from-id']) { // from-id specified
if ((filters['to-page'] === 0 && filters.sort[1] === 1) || (filters.sort[1] * filters['to-page'] > 0)) { // asc if ((filters['to-page'] === 0 && filters.sort[1] === 1) || (filters.sort[1] * filters['to-page'] > 0)) { // asc
return [{$match: {$or: [{[sortKeys[0]]: {$gt: sortStartValue}}, {$and: [{[sortKeys[0]]: sortStartValue}, {[sortKeys[1]]: {$gte: new mongoose.Types.ObjectId(filters['from-id'])}}]}]}}, return [
{$sort: {[sortKeys[0]]: 1, _id: 1}}]; {$match: {$or: [
{[sortKeys[0]]: {$gt: sortStartValue}},
{$and: [
{[sortKeys[0]]: sortStartValue},
{[sortKeys[1]]: {$gte: new mongoose.Types.ObjectId(filters['from-id'])}}
]}
]}},
{$sort: {[sortKeys[0]]: 1, _id: 1}}
];
} else { } else {
return [{$match: {$or: [{[sortKeys[0]]: {$lt: sortStartValue}}, {$and: [{[sortKeys[0]]: sortStartValue}, {[sortKeys[1]]: {$lte: new mongoose.Types.ObjectId(filters['from-id'])}}]}]}}, return [
{$sort: {[sortKeys[0]]: -1, _id: -1}}]; {$match: {$or: [
{[sortKeys[0]]: {$lt: sortStartValue}},
{$and: [
{[sortKeys[0]]: sortStartValue},
{[sortKeys[1]]: {$lte: new mongoose.Types.ObjectId(filters['from-id'])}}
]}
]}},
{$sort: {[sortKeys[0]]: -1, _id: -1}}
];
} }
} else { // sort from beginning } else { // sort from beginning
return [{$sort: {[sortKeys[0]]: filters.sort[1], [sortKeys[1]]: filters.sort[1]}}]; // set _id as secondary sort return [{$sort: {[sortKeys[0]]: filters.sort[1], [sortKeys[1]]: filters.sort[1]}}]; // set _id as secondary sort
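For illustration, a hypothetical call that resumes an ascending sort on color one page after a given sample (all values invented; sort is assumed to be already split into [key, direction], and sortStartValue to be the color of the from-id sample as supplied by the caller) would produce roughly these stages:

// sortQuery({'from-id': '5f2960000000000000000000', 'to-page': 1, sort: ['color', 1]}, ['color', '_id'], 'black')
// returns (schematically):
// [
//   {$match: {$or: [
//     {color: {$gt: 'black'}},                                                          // samples with a later color
//     {$and: [{color: 'black'}, {_id: {$gte: ObjectId('5f2960000000000000000000')}}]}   // same color, cursor onwards
//   ]}},
//   {$sort: {color: 1, _id: 1}}
// ]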
@ -775,29 +864,49 @@ function filterQueries (filters) {
return {[e.field]: {['$in']: [new RegExp(e.values[0])]}}; return {[e.field]: {['$in']: [new RegExp(e.values[0])]}};
} }
else { else {
return {[e.field]: {['$' + e.mode]: (e.mode.indexOf('in') >= 0 ? e.values : e.values[0])}}; // add filter criteria as {field: {$mode: value}}, only use first value when mode is not in/nin // add filter criteria as {field: {$mode: value}}, only use first value when mode is not in/nin
return {[e.field]: {['$' + e.mode]: (e.mode.indexOf('in') >= 0 ? e.values : e.values[0])}};
} }
}); });
} }
// add measurements as property [template.name], if one result, array is reduced to direct values
function addMeasurements(queryPtr, template) {
queryPtr.push(
{$addFields: {[template.name]: {$let: {vars: {
arr: {$filter: {
input: '$measurements', cond: {$eq: ['$$this.measurement_template', mongoose.Types.ObjectId(template._id)]}
}}},
in: {$cond: [{$lte: [{$size: '$$arr'}, 1]}, {$arrayElemAt: ['$$arr', 0]}, '$$arr']}
}}}},
{$addFields: {[template.name]: {$cond: [
'$' + template.name + '.values',
'$' + template.name + '.values',
template.parameters.reduce((s, e) => {s[e.name] = null; return s;}, {})
]}}}
);
}
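Schematically, for a hypothetical measurement template named 'kf' with a single parameter 'humidity' (names invented for illustration), the two stages turn a sample document like the first line into the second:

// before:  {number: 'Rng32', measurements: [{measurement_template: ObjectId('...'), values: {humidity: 0.5}, ...}]}
// stage 1: kf = the single matching measurement (a one-element array is unwrapped, several matches stay an array)
// stage 2: kf = {humidity: 0.5}        // its values, or {humidity: null} when no measurement of that template exists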
function dateToOId (date) { // convert date to ObjectId function dateToOId (date) { // convert date to ObjectId
return mongoose.Types.ObjectId(Math.floor(date / 1000).toString(16) + '0000000000000000'); return mongoose.Types.ObjectId(Math.floor(date / 1000).toString(16) + '0000000000000000');
} }
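Worked example (date chosen arbitrarily): the Unix timestamp in seconds becomes the 8-digit hex prefix of an otherwise zero-padded 24-character ObjectId, which MongoDB orders like a creation date.

// 2020-08-04T11:54:14Z -> 1596542054 s -> '5f294c66' -> ObjectId('5f294c660000000000000000')
dateToOId(new Date('2020-08-04T11:54:14Z'));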
async function sampleReturn (sampleData, req, res, next) { async function sampleReturn (sampleData, req, res, next) {
if (sampleData) { if (sampleData) {
console.log(sampleData); await sampleData.populate('material_id.group_id').populate('material_id.supplier_id')
await sampleData.populate('material_id.group_id').populate('material_id.supplier_id').execPopulate().catch(err => next(err)); .execPopulate().catch(err => next(err));
if (sampleData instanceof Error) return; if (sampleData instanceof Error) return;
sampleData = sampleData.toObject(); sampleData = sampleData.toObject();
if (sampleData.status === globals.status.deleted && !req.auth(res, ['maintain', 'admin'], 'all')) return; // deleted samples only available for maintain/admin // deleted samples only available for maintain/admin
if (sampleData.status === globals.status.deleted && !req.auth(res, ['maintain', 'admin'], 'all')) return;
sampleData.material = sampleData.material_id; // map data to right keys sampleData.material = sampleData.material_id; // map data to right keys
sampleData.material.group = sampleData.material.group_id.name; sampleData.material.group = sampleData.material.group_id.name;
sampleData.material.supplier = sampleData.material.supplier_id.name; sampleData.material.supplier = sampleData.material.supplier_id.name;
sampleData.user = sampleData.user_id.name; sampleData.user = sampleData.user_id.name;
sampleData.notes = sampleData.note_id ? sampleData.note_id : {}; sampleData.notes = sampleData.note_id ? sampleData.note_id : {};
MeasurementModel.find({sample_id: sampleData._id, status: {$ne: globals.status.deleted}}).lean().exec((err, data) => { MeasurementModel.find({sample_id: sampleData._id, status: {$ne: globals.status.deleted}})
.lean().exec((err, data) => {
sampleData.measurements = data; sampleData.measurements = data;
res.json(SampleValidate.output(sampleData, 'details')); res.json(SampleValidate.output(sampleData, 'details'));
}); });

View File

@ -4,6 +4,7 @@ import TemplateConditionModel from '../models/condition_template';
import TemplateMeasurementModel from '../models/measurement_template'; import TemplateMeasurementModel from '../models/measurement_template';
import TestHelper from "../test/helper"; import TestHelper from "../test/helper";
// TODO: method to return only latest template versions -> rework frontend accordingly
describe('/template', () => { describe('/template', () => {
let server; let server;

View File

@ -15,7 +15,7 @@ export default class SampleValidate {
.allow(''), .allow(''),
type: Joi.string() type: Joi.string()
.max(128), .valid('granulate', 'part', 'tension rod'),
batch: Joi.string() batch: Joi.string()
.max(128) .max(128)
@ -116,7 +116,8 @@ export default class SampleValidate {
} }
} }
static output (data, param = 'refs+added', additionalParams = []) { // validate output and strip unwanted properties, returns null if not valid // validate output and strip unwanted properties, returns null if not valid
static output (data, param = 'refs+added', additionalParams = []) {
if (param === 'refs+added') { if (param === 'refs+added') {
param = 'refs'; param = 'refs';
data.added = data._id.getTimestamp(); data.added = data._id.getTimestamp();
@ -169,12 +170,16 @@ export default class SampleValidate {
if (filterValidation.error) return filterValidation; if (filterValidation.error) return filterValidation;
try { try {
for (let i in data.filters) { for (let i in data.filters) {
// data.filters[i] = JSON.parse(decodeURIComponent(data.filters[i]));
data.filters[i] = JSON.parse(data.filters[i]); data.filters[i] = JSON.parse(data.filters[i]);
data.filters[i].values = data.filters[i].values.map(e => { // validate filter values data.filters[i].values = data.filters[i].values.map(e => { // validate filter values
let validator; let validator;
let field = data.filters[i].field let field = data.filters[i].field
if (/material\./.test(field)) { // select right validation model if (/material\./.test(field)) { // select right validation model
validator = MaterialValidate.outputV().append({number: Joi.string().max(128).allow(''), properties: Joi.alternatives().try(Joi.number(), Joi.string().max(128))}); validator = MaterialValidate.outputV().append({
number: Joi.string().max(128).allow(''),
properties: Joi.alternatives().try(Joi.number(), Joi.string().max(128))
});
field = field.replace('material.', '').split('.')[0]; field = field.replace('material.', '').split('.')[0];
} }
else if (/measurements\./.test(field)) { else if (/measurements\./.test(field)) {
@ -194,12 +199,12 @@ export default class SampleValidate {
validator = Joi.object(this.sample); validator = Joi.object(this.sample);
} }
const {value, error} = validator.validate({[field]: e}); const {value, error} = validator.validate({[field]: e});
if (error) throw error; // reject invalid values // TODO: return exact error description, handle in frontend filters if (error) throw error; // reject invalid values
return value[field]; return value[field];
}); });
} }
} }
catch { catch (err) {
return {error: {details: [{message: 'Invalid JSON string for filter parameter'}]}, value: null} return {error: {details: [{message: 'Invalid JSON string for filter parameter'}]}, value: null}
} }
} }
@ -208,13 +213,22 @@ export default class SampleValidate {
'from-id': IdValidate.get(), 'from-id': IdValidate.get(),
'to-page': Joi.number().integer(), 'to-page': Joi.number().integer(),
'page-size': Joi.number().integer().min(1), 'page-size': Joi.number().integer().min(1),
sort: Joi.string().pattern(new RegExp('^(' + this.sortKeys.join('|').replace(/\./g, '\\.').replace(/\*/g, '.+') + ')-(asc|desc)$', 'm')).default('_id-asc'), sort: Joi.string().pattern(
new RegExp('^(' + this.sortKeys.join('|').replace(/\./g, '\\.').replace(/\*/g, '.+') + ')-(asc|desc)$', 'm')
).default('_id-asc'),
csv: Joi.boolean().default(false), csv: Joi.boolean().default(false),
fields: Joi.array().items(Joi.string().pattern(new RegExp('^(' + this.fieldKeys.join('|').replace(/\./g, '\\.').replace(/\*/g, '.+') + ')$', 'm'))).default(['_id','number','type','batch','material_id','color','condition','note_id','user_id','added']).messages({'string.pattern.base': 'Invalid field name'}), fields: Joi.array().items(Joi.string().pattern(
new RegExp('^(' + this.fieldKeys.join('|').replace(/\./g, '\\.').replace(/\*/g, '.+') + ')$', 'm')
)).default(['_id','number','type','batch','material_id','color','condition','note_id','user_id','added'])
.messages({'string.pattern.base': 'Invalid field name'}),
filters: Joi.array().items(Joi.object({ filters: Joi.array().items(Joi.object({
mode: Joi.string().valid('eq', 'ne', 'lt', 'lte', 'gt', 'gte', 'in', 'nin', 'stringin'), mode: Joi.string().valid('eq', 'ne', 'lt', 'lte', 'gt', 'gte', 'in', 'nin', 'stringin'),
field: Joi.string().pattern(new RegExp('^(' + this.fieldKeys.join('|').replace(/\./g, '\\.').replace(/\*/g, '.+') + ')$', 'm')).messages({'string.pattern.base': 'Invalid filter field name'}), field: Joi.string().pattern(
values: Joi.array().items(Joi.alternatives().try(Joi.string().max(128), Joi.number(), Joi.boolean(), Joi.date().iso(), Joi.object())).min(1) new RegExp('^(' + this.fieldKeys.join('|').replace(/\./g, '\\.').replace(/\*/g, '.+') + ')$', 'm')
).messages({'string.pattern.base': 'Invalid filter field name'}),
values: Joi.array().items(Joi.alternatives().try(
Joi.string().max(128), Joi.number(), Joi.boolean(), Joi.date().iso(), Joi.object()
)).min(1)
})).default([]) })).default([])
}).with('to-page', 'page-size').validate(data); }).with('to-page', 'page-size').validate(data);
} }

View File

@ -1,7 +1,7 @@
import Joi from '@hapi/joi'; import Joi from '@hapi/joi';
import IdValidate from './id'; import IdValidate from './id';
// TODO: do not allow a . in the name // TODO: do not allow a . in the name !!!
export default class TemplateValidate { export default class TemplateValidate {
private static template = { private static template = {
name: Joi.string() name: Joi.string()

View File

@ -99,7 +99,7 @@
{ {
"_id": {"$oid":"400000000000000000000007"}, "_id": {"$oid":"400000000000000000000007"},
"number": "34", "number": "34",
"type": "liquid", "type": "other",
"color": "black", "color": "black",
"batch": "", "batch": "",
"condition": {}, "condition": {},

View File

@ -29,7 +29,10 @@ export default class TestHelper {
} }
static beforeEach (server, done) { static beforeEach (server, done) {
delete require.cache[require.resolve('../index')]; // prevent loading from cache // delete cached server code except models as these are needed in the testing files as well
Object.keys(require.cache).filter(e => /API\\dist\\(?!(models|db|test))/.test(e)).forEach(key => {
delete require.cache[key]; // prevent loading from cache
});
server = require('../index'); server = require('../index');
db.drop(err => { // reset database db.drop(err => { // reset database
if (err) return done(err); if (err) return done(err);
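The new filter above is the core of this commit: instead of dropping only ../index from the require cache, every compiled file under API\dist is evicted except the models, db and test modules, so each test gets a freshly wired server while the modules the test files themselves hold references to stay cached. An illustration of which cached paths the regular expression matches (paths are hypothetical, Windows separators as implied by the escaped backslashes):

const re = /API\\dist\\(?!(models|db|test))/;
re.test('C:\\dev\\API\\dist\\routes\\sample.js');   // true  -> deleted from require.cache
re.test('C:\\dev\\API\\dist\\index.js');            // true  -> deleted, so require('../index') rebuilds the server
re.test('C:\\dev\\API\\dist\\models\\sample.js');   // false -> kept
re.test('C:\\dev\\API\\dist\\test\\helper.js');     // false -> kept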
@ -38,10 +41,13 @@ export default class TestHelper {
return server return server
} }
static request (server, done, options) { // options in form: {method, url, contentType, auth: {key/basic: 'name' or 'key'/{name, pass}}, httpStatus, req, res, default (set to false if you want to dismiss default .end handling)} // options in form: {method, url, contentType, auth: {key/basic: 'name' or 'key'/{name, pass}}, httpStatus, req, res,
// default (set to false if you want to dismiss default .end handling)}
static request (server, done, options) {
let st = supertest(server); let st = supertest(server);
if (options.hasOwnProperty('auth') && options.auth.hasOwnProperty('key')) { // resolve API key if (options.hasOwnProperty('auth') && options.auth.hasOwnProperty('key')) { // resolve API key
options.url += '?key=' + (this.auth.hasOwnProperty(options.auth.key)? this.auth[options.auth.key].key : options.auth.key); options.url += '?key=' +
(this.auth.hasOwnProperty(options.auth.key)? this.auth[options.auth.key].key : options.auth.key);
} }
switch (options.method) { // http method switch (options.method) { // http method
case 'get': case 'get':
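A hypothetical call matching the options format described above (route, user name and expected status are placeholders):

TestHelper.request(server, done, {
  method: 'get',
  url: '/samples/count',
  auth: {key: 'janedoe'},            // resolved to the stored API key and appended as ?key=... (see above)
  httpStatus: 200
});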
@ -91,10 +97,12 @@ export default class TestHelper {
done(); done();
}); });
} }
else if (options.hasOwnProperty('log')) { // check changelog, takes log: {collection, skip, data/(dataAdd, dataIgn)} // check changelog, takes log: {collection, skip, data/(dataAdd, dataIgn)}
else if (options.hasOwnProperty('log')) {
return st.end(err => { return st.end(err => {
if (err) return done (err); if (err) return done (err);
ChangelogModel.findOne({}).sort({_id: -1}).skip(options.log.skip? options.log.skip : 0).lean().exec((err, data) => { // latest entry ChangelogModel.findOne({}).sort({_id: -1}).skip(options.log.skip? options.log.skip : 0)
.lean().exec((err, data) => { // latest entry
if (err) return done(err); if (err) return done(err);
should(data).have.only.keys('_id', 'action', 'collectionName', 'conditions', 'data', 'user_id', '__v'); should(data).have.only.keys('_id', 'action', 'collectionName', 'conditions', 'data', 'user_id', '__v');
should(data).have.property('action', options.method.toUpperCase() + ' ' + options.url); should(data).have.property('action', options.method.toUpperCase() + ' ' + options.url);