diff --git a/api/model.yaml b/api/model.yaml
index b701df4..a025ac9 100644
--- a/api/model.yaml
+++ b/api/model.yaml
@@ -1,4 +1,79 @@
-/model/{name}:
+/model/groups:
+ get:
+ summary: list all available groups
+ description: 'Auth: basic, levels: read, write, dev, admin'
+ tags:
+ - /model
+ responses:
+ 200:
+ description: all groups
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ properties:
+ group:
+ type: string
+ example: VN
+ models:
+ type: array
+ items:
+ $ref: 'api.yaml#/components/schemas/ModelItem'
+ 401:
+ $ref: 'api.yaml#/components/responses/401'
+ 500:
+ $ref: 'api.yaml#/components/responses/500'
+
+/model/{group}:
+ parameters:
+ - $ref: 'api.yaml#/components/parameters/Group'
+ post:
+ summary: add/replace model group item
+ description: 'Auth: basic, levels: dev, admin
+      If the given name exists, the item is replaced,
+ otherwise it is newly created'
+ tags:
+ - /model
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: 'api.yaml#/components/schemas/ModelItem'
+ responses:
+ 200:
+ $ref: 'api.yaml#/components/responses/Ok'
+ 400:
+ $ref: 'api.yaml#/components/responses/400'
+ 401:
+ $ref: 'api.yaml#/components/responses/401'
+ 403:
+ $ref: 'api.yaml#/components/responses/403'
+ 500:
+ $ref: 'api.yaml#/components/responses/500'
+
+/model/{group}/{name}:
+ parameters:
+ - $ref: 'api.yaml#/components/parameters/Group'
+ - $ref: 'api.yaml#/components/parameters/Name'
+ delete:
+ summary: remove model group item
+ description: 'Auth: basic, levels: dev, admin'
+ tags:
+ - /model
+ responses:
+ 200:
+ $ref: 'api.yaml#/components/responses/Ok'
+ 401:
+ $ref: 'api.yaml#/components/responses/401'
+ 403:
+ $ref: 'api.yaml#/components/responses/403'
+ 404:
+ $ref: 'api.yaml#/components/responses/404'
+ 500:
+ $ref: 'api.yaml#/components/responses/500'
+
+/model/file/{name}:
parameters:
- $ref: 'api.yaml#/components/parameters/Name'
get:
diff --git a/api/parameters.yaml b/api/parameters.yaml
index 67ac778..192b15a 100644
--- a/api/parameters.yaml
+++ b/api/parameters.yaml
@@ -38,4 +38,12 @@ Collection:
required: true
schema:
type: string
- example: condition
\ No newline at end of file
+ example: condition
+
+Group:
+ name: group
+ in: path
+ required: true
+ schema:
+ type: string
+ example: vn
\ No newline at end of file
diff --git a/api/schemas.yaml b/api/schemas.yaml
index b574b8b..6cb8dee 100644
--- a/api/schemas.yaml
+++ b/api/schemas.yaml
@@ -211,3 +211,15 @@ User:
items:
type: string
example: Alpha II
+
+ModelItem:
+ properties:
+ name:
+ type: string
+ example: Model 1.1
+ url:
+ type: string
+ example: https://definma-model-test.apps.de1.bosch-iot-cloud.com/predict/model1-1
+ label:
+ type: string
+ example: 'ml/g'
\ No newline at end of file
diff --git a/data_import/import.js b/data_import/import.js
index 38f4526..f8e34e6 100644
--- a/data_import/import.js
+++ b/data_import/import.js
@@ -9,27 +9,204 @@ const _ = require('lodash');
const stages = {
materials: true,
- samples: false,
- dpt: false
+ samples: true,
+ dpt: true,
+ dptFromList: false // ignores dpts not on the dptList
}
+const dptList = [
+ 'Eh01_Eh3001_01_csv.DPT',
+ 'Eh01_Eh3001_02_csv.DPT',
+ 'Eh01_Eh3001_03_csv.DPT',
+ 'Eh01_Eh3002_01_csv.DPT',
+ 'Eh01_Eh3002_02_csv.DPT',
+ 'Eh01_Eh3002_03_csv.DPT',
+ 'Eh01_Eh3003_01_csv.DPT',
+ 'Eh01_Eh3003_02_csv.DPT',
+ 'Eh01_Eh3003_03_csv.DPT',
+ 'Eh01_Eh3004_01_csv.DPT',
+ 'Eh01_Eh3004_02_csv.DPT',
+ 'Eh01_Eh3004_03_csv.DPT',
+ 'Eh01_Eh3005_01_csv.DPT',
+ 'Eh01_Eh3005_02_csv.DPT',
+ 'Eh01_Eh3005_03_csv.DPT',
+ 'Eh01_Eh3005_04_csv.DPT',
+ 'Eh01_Eh3005_05_csv.DPT',
+ 'Eh01_Eh3005_06_csv.DPT',
+ 'Eh01_Eh3005_07_csv.DPT',
+ 'Eh01_Eh3005_08_csv.DPT',
+ 'Eh01_Eh3005_09_csv.DPT',
+ 'Eh01_Eh3006_01_csv.DPT',
+ 'Eh01_Eh3006_02_csv.DPT',
+ 'Eh01_Eh3006_03_csv.DPT',
+ 'Eh01_Eh3007_01_csv.DPT',
+ 'Eh01_Eh3007_02_csv.DPT',
+ 'Eh01_Eh3007_03_csv.DPT',
+ 'Eh01_Eh3008_01_csv.DPT',
+ 'Eh01_Eh3008_02_csv.DPT',
+ 'Eh01_Eh3008_03_csv.DPT',
+ 'Eh01_Eh3009_01_csv.DPT',
+ 'Eh01_Eh3009_02_csv.DPT',
+ 'Eh01_Eh3009_03_csv.DPT',
+ 'Eh01_Eh3010_01_csv.DPT',
+ 'Eh01_Eh3010_02_csv.DPT',
+ 'Eh01_Eh3010_03_csv.DPT',
+ 'Eh01_Eh3011_01_csv.DPT',
+ 'Eh01_Eh3011_02_csv.DPT',
+ 'Eh01_Eh3011_03_csv.DPT',
+ 'Eh01_Eh3012_01_csv.DPT',
+ 'Eh01_Eh3012_02_csv.DPT',
+ 'Eh01_Eh3012_03_csv.DPT',
+ 'Eh01_Eh3013_01_csv.DPT',
+ 'Eh01_Eh3013_02_csv.DPT',
+ 'Eh01_Eh3013_03_csv.DPT',
+ 'Eh01_Eh3013_04_csv.DPT',
+ 'Eh01_Eh3013_05_csv.DPT',
+ 'Eh01_Eh3013_06_csv.DPT',
+ 'Eh01_Eh3013_07_csv.DPT',
+ 'Eh01_Eh3013_08_csv.DPT',
+ 'Eh01_Eh3013_09_csv.DPT',
+ 'Eh01_Eh3014_01_csv.DPT',
+ 'Eh01_Eh3014_02_csv.DPT',
+ 'Eh01_Eh3014_03_csv.DPT',
+ 'Eh01_Eh3015_01_csv.DPT',
+ 'Eh01_Eh3015_02_csv.DPT',
+ 'Eh01_Eh3015_03_csv.DPT',
+ 'Eh01_Eh3016_01_csv.DPT',
+ 'Eh01_Eh3016_02_csv.DPT',
+ 'Eh01_Eh3016_03_csv.DPT',
+ 'Eh01_Eh3017_01_csv.DPT',
+ 'Eh01_Eh3017_02_csv.DPT',
+ 'Eh01_Eh3017_03_csv.DPT',
+ 'Eh01_Eh3018_01_csv.DPT',
+ 'Eh01_Eh3018_02_csv.DPT',
+ 'Eh01_Eh3018_03_csv.DPT',
+ 'Eh01_Eh3019_01_csv.DPT',
+ 'Eh01_Eh3019_02_csv.DPT',
+ 'Eh01_Eh3019_03_csv.DPT',
+ 'Eh01_Eh3020_01_csv.DPT',
+ 'Eh01_Eh3020_02_csv.DPT',
+ 'Eh01_Eh3020_03_csv.DPT',
+ 'Eh01_Eh3021_01_csv.DPT',
+ 'Eh01_Eh3021_02_csv.DPT',
+ 'Eh01_Eh3021_03_csv.DPT',
+ 'Eh01_Eh3022_01_csv.DPT',
+ 'Eh01_Eh3022_02_csv.DPT',
+ 'Eh01_Eh3022_03_csv.DPT',
+ 'Eh01_Eh4001_01_csv.DPT',
+ 'Eh01_Eh4001_02_csv.DPT',
+ 'Eh01_Eh4001_03_csv.DPT',
+ 'Eh01_Eh4001_04_csv.DPT',
+ 'Eh01_Eh4001_05_csv.DPT',
+ 'Eh01_Eh4001_06_csv.DPT',
+ 'Eh01_Eh4001_07_csv.DPT',
+ 'Eh01_Eh4001_08_csv.DPT',
+ 'Eh01_Eh4001_09_csv.DPT',
+ 'Eh01_Eh4002_01_csv.DPT',
+ 'Eh01_Eh4002_02_csv.DPT',
+ 'Eh01_Eh4002_03_csv.DPT',
+ 'Eh01_Eh4003_01_csv.DPT',
+ 'Eh01_Eh4003_02_csv.DPT',
+ 'Eh01_Eh4003_03_csv.DPT',
+ 'Eh01_Eh4003_04_csv.DPT',
+ 'Eh01_Eh4003_05_csv.DPT',
+ 'Eh01_Eh4003_06_csv.DPT',
+ 'Eh01_Eh4003_07_csv.DPT',
+ 'Eh01_Eh4003_08_csv.DPT',
+ 'Eh01_Eh4003_09_csv.DPT',
+ 'Eh01_Eh4004_01_csv.DPT',
+ 'Eh01_Eh4004_02_csv.DPT',
+ 'Eh01_Eh4004_03_csv.DPT',
+ 'Eh01_Eh4004_04_csv.DPT',
+ 'Eh01_Eh4004_05_csv.DPT',
+ 'Eh01_Eh4004_06_csv.DPT',
+ 'Eh01_Eh4004_07_csv.DPT',
+ 'Eh01_Eh4004_08_csv.DPT',
+ 'Eh01_Eh4004_09_csv.DPT',
+ 'Eh01_Eh4005_01_csv.DPT',
+ 'Eh01_Eh4005_02_csv.DPT',
+ 'Eh01_Eh4005_03_csv.DPT',
+ 'Eh01_Eh4005_04_csv.DPT',
+ 'Eh01_Eh4005_05_csv.DPT',
+ 'Eh01_Eh4005_06_csv.DPT',
+ 'Eh01_Eh4005_07_csv.DPT',
+ 'Eh01_Eh4005_08_csv.DPT',
+ 'Eh01_Eh4005_09_csv.DPT',
+ 'Eh01_Eh4006_01_csv.DPT',
+ 'Eh01_Eh4006_02_csv.DPT',
+ 'Eh01_Eh4006_03_csv.DPT',
+ 'Eh01_Eh4006_04_csv.DPT',
+ 'Eh01_Eh4006_05_csv.DPT',
+ 'Eh01_Eh4006_06_csv.DPT',
+ 'Eh01_Eh4006_07_csv.DPT',
+ 'Eh01_Eh4006_08_csv.DPT',
+ 'Eh01_Eh4006_09_csv.DPT',
+ 'Eh01_Eh4007_01_csv.DPT',
+ 'Eh01_Eh4007_02_csv.DPT',
+ 'Eh01_Eh4007_03_csv.DPT',
+ 'Eh01_Eh4007_04_csv.DPT',
+ 'Eh01_Eh4007_05_csv.DPT',
+ 'Eh01_Eh4007_06_csv.DPT',
+ 'Eh01_Eh4007_07_csv.DPT',
+ 'Eh01_Eh4007_08_csv.DPT',
+ 'Eh01_Eh4007_09_csv.DPT',
+ 'Eh01_Eh4008_01_csv.DPT',
+ 'Eh01_Eh4008_02_csv.DPT',
+ 'Eh01_Eh4008_03_csv.DPT',
+ 'Eh01_Eh4008_04_csv.DPT',
+ 'Eh01_Eh4008_05_csv.DPT',
+ 'Eh01_Eh4008_06_csv.DPT',
+ 'Eh01_Eh4008_07_csv.DPT',
+ 'Eh01_Eh4008_08_csv.DPT',
+ 'Eh01_Eh4008_09_csv.DPT',
+ 'Eh01_Eh4009_01_csv.DPT',
+ 'Eh01_Eh4009_02_csv.DPT',
+ 'Eh01_Eh4009_03_csv.DPT',
+ 'Eh01_Eh4009_04_csv.DPT',
+ 'Eh01_Eh4009_05_csv.DPT',
+ 'Eh01_Eh4009_06_csv.DPT',
+ 'Eh01_Eh4009_07_csv.DPT',
+ 'Eh01_Eh4009_08_csv.DPT',
+ 'Eh01_Eh4009_09_csv.DPT',
+ 'Eh01_Eh4010_01_csv.DPT',
+ 'Eh01_Eh4010_02_csv.DPT',
+ 'Eh01_Eh4010_03_csv.DPT',
+ 'Eh01_Eh4010_04_csv.DPT',
+ 'Eh01_Eh4010_05_csv.DPT',
+ 'Eh01_Eh4010_06_csv.DPT',
+ 'Eh01_Eh4010_07_csv.DPT',
+ 'Eh01_Eh4010_08_csv.DPT',
+ 'Eh01_Eh4010_09_csv.DPT',
+ 'Eh01_Eh4011_01_csv.DPT',
+ 'Eh01_Eh4011_02_csv.DPT',
+ 'Eh01_Eh4011_03_csv.DPT',
+ 'Eh01_Eh4011_04_csv.DPT',
+ 'Eh01_Eh4011_05_csv.DPT',
+ 'Eh01_Eh4011_06_csv.DPT',
+ 'Eh01_Eh4011_07_csv.DPT',
+ 'Eh01_Eh4011_08_csv.DPT',
+ 'Eh01_Eh4011_09_csv.DPT'
+];
+
const docs = [
- "C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\Metadata__AnP2.csv",
- "C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\Metadata__AnP2_A.csv",
- "C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\Metadata__AnP2_B.csv",
- "C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\Metadata_Ap.csv",
- "C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\Metadata_Bj.csv",
- "C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\Metadata_Eh.csv",
- "C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\Metadata_Eh_B.csv",
- "C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\Metadata_Eh_Duroplasten.csv",
- "C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\Metadata_Rng_aktuell.csv",
- "C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\Metadata_Rng_aktuell_A.csv",
- "C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\Metadata_Rng_aktuell_B.csv",
- "C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\Metadata_WaP.csv",
+ "C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\DPPD_DPPD-frei.csv",
+ // "C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\Metadata__AnP2.csv",
+ // "C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\Metadata__AnP2_A.csv",
+ // "C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\Metadata__AnP2_B.csv",
+ // "C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\Metadata_Ap.csv",
+ // "C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\Metadata_Bj.csv",
+ // "C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\Metadata_Eh.csv",
+ // "C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\Metadata_Eh_B.csv",
+ // "C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\Metadata_Eh_Duroplasten.csv",
+ // "C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\Metadata_Rng_aktuell.csv",
+ // "C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\Metadata_Rng_aktuell_A.csv",
+ // "C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\Metadata_Rng_aktuell_B.csv",
+ // "C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\Metadata_WaP.csv",
];
const errors = [];
const nmDocs = 'C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\nmDocs'; // NormMaster Documents
-const dptFiles = 'C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\DPT'; // Spectrum files
+// const dptFiles = 'C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\DPT'; // Spectrum files
+const dptFiles = 'C:\\Users\\vle2fe\\Documents\\Data\\All_200807\\DPT_DPPD'; // Spectrum files
const host = 'http://localhost:3000';
// const host = 'https://definma-api.apps.de1.bosch-iot-cloud.com';
const requiredProperties = ['samplenumber','materialnumber','materialname','supplier','reinforcementmaterial','material','granulate/part','color','charge/batch','comments'];
@@ -81,7 +258,7 @@ async function main() {
for (let i in docs) {
await importCsv(docs[i]);
await allMaterials();
- // await saveMaterials();
+ await saveMaterials();
}
fs.writeFileSync('./data_import/numberToColor.json', JSON.stringify(numberToColor));
fs.writeFileSync('./data_import/duplicateMNames.json', JSON.stringify(duplicateMNamesLog));
@@ -140,6 +317,7 @@ async function importCsv(doc) {
'charge/batchgranulate/part': 'charge/batch',
'charge': 'charge/batch',
'anmerkung': 'comments',
+ 'vz': 'vz(ml/g)',
'vz[ml/g]': 'vz(ml/g)',
'vz[cm³/g]': 'vz(ml/g)',
'abbau(%)': 'degradation(%)',
@@ -247,116 +425,118 @@ async function allDpts() {
res.data.forEach(sample => {
sampleIds[sample.number] = sample._id;
});
- const dptRegex = /(.*?)_(.*?)_(\d+|[a-zA-Z0-9]+[_.]\d+)(_JDX)?[.]{1,2}(DPT|csv|CSV|JDX)/;
+ const dptRegex = /(.*?)_(.*?)_(\d+|[a-zA-Z0-9]+[_.]\d+)(_JDX|_csv)?[.]{1,2}(DPT|csv|CSV|JDX)/;
const dpts = fs.readdirSync(dptFiles);
for (let i in dpts) {
- let regexInput;
- const bjRes = /^(Bj[FT]?)\s?([a-z0-9_]*)_JDX.DPT/.exec(dpts[i]);
- if (bjRes) { // correct Bj numbers with space
- regexInput = `Bj01_${bjRes[1]}${bjRes[2]}_0.DPT`;
- }
- else { // remove _JDX from name
- regexInput = dpts[i].replace(/_JDX.*\./, '.');
- }
- const regexRes = dptRegex.exec(regexInput);
- if (regexRes && !sampleIds[regexRes[2]]) { // when sample number includes an additional _x instead of having _x_x for spectrum description
- regexRes[2] = `${regexRes[2]}_${regexRes[3].split('_')[0]}`;
- }
- let baseSample = null;
- if (regexRes) {
- baseSample = regexRes[2].split('_')[0];
- if (baseSample === 'Wa11') { // as Wa11 samples use all the same material
- baseSample = 'Wa11_B0_1';
+ if (!stages.dptFromList || dptList.indexOf(dpts[i]) >= 0) { // if list option active, only use from filter
+ let regexInput;
+ const bjRes = /^(Bj[FT]?)\s?([a-z0-9_]*)_JDX.DPT/.exec(dpts[i]);
+ if (bjRes) { // correct Bj numbers with space
+ regexInput = `Bj01_${bjRes[1]}${bjRes[2]}_0.DPT`;
}
- }
- if (regexRes && !sampleIds[regexRes[2]] && sampleIds[baseSample]) { // when number_abx does not exist but number
- dptSampleAddLog.push(`Trying to find ${baseSample}`);
- dptSampleAddLog.push(host + '/sample/' + sampleIds[baseSample]);
- res = await axios({ // get base sample
- method: 'get',
- url: host + '/sample/' + stripSpaces(sampleIds[baseSample]),
- auth: {
- username: 'admin',
- password: 'Abc123!#'
+ else { // remove _JDX from name
+ regexInput = dpts[i].replace(/_JDX.*\./, '.');
+ }
+ const regexRes = dptRegex.exec(regexInput);
+ if (regexRes && !sampleIds[regexRes[2]]) { // when sample number includes an additional _x instead of having _x_x for spectrum description
+ regexRes[2] = `${regexRes[2]}_${regexRes[3].split('_')[0]}`;
+ }
+ let baseSample = null;
+ if (regexRes) {
+ baseSample = regexRes[2].split('_')[0];
+ if (baseSample === 'Wa11') { // as Wa11 samples use all the same material
+ baseSample = 'Wa11_B0_1';
}
- }).catch(err => {
- if (err.response) {
- console.error(err.response.data);
- errors.push(`DPT Could not fetch sample ${baseSample}: ${JSON.stringify(err.response.data)}`);
- }
- });
- if (res) {
- const data = _.merge(_.pick(res.data, ['color', 'type', 'batch']),
- {number: regexRes[2], condition: {}, notes: {}, material_id: res.data.material._id});
- res = await axios({
- method: 'post',
- url: host + '/sample/new',
+ }
+ if (regexRes && !sampleIds[regexRes[2]] && sampleIds[baseSample]) { // when number_abx does not exist but number
+ dptSampleAddLog.push(`Trying to find ${baseSample}`);
+ dptSampleAddLog.push(host + '/sample/' + sampleIds[baseSample]);
+ res = await axios({ // get base sample
+ method: 'get',
+ url: host + '/sample/' + stripSpaces(sampleIds[baseSample]),
auth: {
- username: res.data.user,
- password: res.data.user === 'admin' ? 'Abc123!#' : '2020DeFinMachen!'
- },
- data
+ username: 'admin',
+ password: 'Abc123!#'
+ }
}).catch(err => {
if (err.response) {
console.error(err.response.data);
- errors.push(`DPT Could not save sample ${data}: ${err.response.data}`);
+ errors.push(`DPT Could not fetch sample ${baseSample}: ${JSON.stringify(err.response.data)}`);
}
});
- if (res.data) {
- dptSampleAddLog.push(`${regexRes[2]} from ${baseSample}`)
- sampleIds[regexRes[2]] = res.data._id;
+ if (res) {
+ const data = _.merge(_.pick(res.data, ['color', 'type', 'batch']),
+ {number: regexRes[2], condition: {}, notes: {}, material_id: res.data.material._id});
+ res = await axios({
+ method: 'post',
+ url: host + '/sample/new',
+ auth: {
+ username: res.data.user,
+ password: res.data.user === 'admin' ? 'Abc123!#' : '2020DeFinMachen!'
+ },
+ data
+ }).catch(err => {
+ if (err.response) {
+ console.error(err.response.data);
+ errors.push(`DPT Could not save sample ${data}: ${err.response.data}`);
+ }
+ });
+ if (res.data) {
+ dptSampleAddLog.push(`${regexRes[2]} from ${baseSample}`)
+ sampleIds[regexRes[2]] = res.data._id;
+ }
}
}
- }
- if (regexRes && sampleIds[regexRes[2]]) { // found matching sample
- console.log(`${i}/${dpts.length} ${dpts[i]} -> ${regexRes[2]}`);
- dptLog.push(`${dpts[i]}, ${regexRes[2]}`);
- const f = fs.readFileSync(dptFiles + '\\' + dpts[i], 'utf-8');
- const data = {
- sample_id: sampleIds[regexRes[2]],
- values: {},
- measurement_template
- };
- data.values.device = regexRes[1];
- data.values.filename = dpts[i];
- data.values.dpt = f.split('\r\n').map(e => e.split(',').map(e => parseFloat(e)));
- let rescale = false;
- for (let i in data.values.dpt) {
- if (data.values.dpt[i][1] > 10) {
- rescale = true;
- break;
+ if (regexRes && sampleIds[regexRes[2]]) { // found matching sample
+ console.log(`${i}/${dpts.length} ${dpts[i]} -> ${regexRes[2]}`);
+ dptLog.push(`${dpts[i]}, ${regexRes[2]}`);
+ const f = fs.readFileSync(dptFiles + '\\' + dpts[i], 'utf-8');
+ const data = {
+ sample_id: sampleIds[regexRes[2]],
+ values: {},
+ measurement_template
+ };
+ data.values.device = regexRes[1];
+ data.values.filename = dpts[i];
+ data.values.dpt = f.split('\r\n').map(e => e.split(',').map(e => parseFloat(e)));
+ let rescale = false;
+ for (let i in data.values.dpt) {
+ if (data.values.dpt[i][1] > 10) {
+ rescale = true;
+ break;
+ }
}
- }
- if (rescale) {
- data.values.dpt = data.values.dpt.map(e => [e[0], e[1] / 100]);
- }
- await axios({
- method: 'post',
- url: host + '/measurement/new',
- auth: {
- username: 'admin',
- password: 'Abc123!#'
- },
- data
- }).catch(err => {
- console.log(dpts[i]);
- if (err.response) {
- console.error(err.response.data);
- errors.push(`Could not upload ${dpts[i]} for sample ${regexRes[2]}: ${err.response.data}`);
+ if (rescale) {
+ data.values.dpt = data.values.dpt.map(e => [e[0], e[1] / 100]);
}
- else {
- console.error(err);
- errors.push(`Could not upload ${dpts[i]} for sample ${regexRes[2]}: ${JSON.stringify(err)}`);
- }
- });
- }
- else {
- console.log(`Could not find sample for ${dpts[i]}`);
- if (regexRes) {
- errors.push(`Could not find sample for ${dpts[i]}; [DEBUG] ${regexRes[2]}, ${!sampleIds[regexRes[2]]}, ${sampleIds[baseSample]}`);
+ await axios({
+ method: 'post',
+ url: host + '/measurement/new',
+ auth: {
+ username: 'admin',
+ password: 'Abc123!#'
+ },
+ data
+ }).catch(err => {
+ console.log(dpts[i]);
+ if (err.response) {
+ console.error(err.response.data);
+ errors.push(`Could not upload ${dpts[i]} for sample ${regexRes[2]}: ${err.response.data}`);
+ }
+ else {
+ console.error(err);
+ errors.push(`Could not upload ${dpts[i]} for sample ${regexRes[2]}: ${JSON.stringify(err)}`);
+ }
+ });
}
else {
- errors.push(`Could not find sample for ${dpts[i]} (did not match RegEx)`);
+ console.log(`Could not find sample for ${dpts[i]}`);
+ if (regexRes) {
+ errors.push(`Could not find sample for ${dpts[i]}; [DEBUG] ${regexRes[2]}, ${!sampleIds[regexRes[2]]}, ${sampleIds[baseSample]}`);
+ }
+ else {
+ errors.push(`Could not find sample for ${dpts[i]} (did not match RegEx)`);
+ }
}
}
}
@@ -399,6 +579,7 @@ async function allMcVn() {
for (let index in data) {
console.info(`MC/VN ${index}/${data.length}`);
let sample = data[index];
+ console.log(sample);
sample['samplenumber'] = sample['samplenumber'].replace(/[A-Z][a-z]0\d_/, '');
let credentials = ['admin', 'Abc123!#'];
if (sampleDevices[sample['samplenumber']]) {
@@ -671,10 +852,6 @@ async function allMaterials() {
// process all samples
for (let index in data) {
let sample = data[index];
- // TODO: remove next if, only for duplicate m names
- if (duplicateMNames.indexOf(sample['materialname']) >= 0) {
- duplicateMNamesLog.push(sample);
- }
if (sample['supplier'] === '') { // empty supplier fields
sample['supplier'] = 'unknown';
}
@@ -700,7 +877,7 @@ async function allMaterials() {
}
}
else { // new material
- // console.info(`MATERIAL LOAD ${index}/${data.length} ${sample['materialname']}`);
+ console.info(`MATERIAL LOAD ${index}/${data.length} ${sample['materialname']}`);
materials[sample['materialname']] = {
name: trim(sample['materialname']),
supplier: trim(sample['supplier']),
diff --git a/src/helpers/flatten.ts b/src/helpers/flatten.ts
index deb3af9..ed54da9 100644
--- a/src/helpers/flatten.ts
+++ b/src/helpers/flatten.ts
@@ -7,8 +7,8 @@ export default function flatten (data, keepArray = false) { // flatten object:
result[prop] = cur;
}
else if (prop === `${globals.spectrum.spectrum}.${globals.spectrum.dpt}`) {
- result[prop + '.labels'] = cur.map(e => e[0]);
- result[prop + '.values'] = cur.map(e => e[1]);
+ result[prop + '.labels'] = cur.map(e => parseFloat(e[0]));
+ result[prop + '.values'] = cur.map(e => parseFloat(e[1]));
}
else if (Array.isArray(cur)) {
if (keepArray) {
diff --git a/src/models/model.ts b/src/models/model.ts
index 925601f..92d555d 100644
--- a/src/models/model.ts
+++ b/src/models/model.ts
@@ -1,8 +1,20 @@
import mongoose from 'mongoose';
+import db from '../db';
const ModelSchema = new mongoose.Schema({
- name: {type: String, index: {unique: true}},
- data: Buffer
+ group: {type: String, index: {unique: true}},
+ models: [new mongoose.Schema({
+ name: String,
+ url: String,
+ label: String
+ } ,{ _id : false })]
});
+// changelog query helper
+ModelSchema.query.log = function (req) {
+ db.log(req, this);
+ return this;
+}
+ModelSchema.index({group: 1});
+
 export default mongoose.model<any>('model', ModelSchema);
\ No newline at end of file
diff --git a/src/models/model_file.ts b/src/models/model_file.ts
new file mode 100644
index 0000000..891353a
--- /dev/null
+++ b/src/models/model_file.ts
@@ -0,0 +1,8 @@
+import mongoose from 'mongoose';
+
+const ModelFileSchema = new mongoose.Schema({
+ name: {type: String, index: {unique: true}},
+ data: Buffer
+});
+
+export default mongoose.model<any>('model_file', ModelFileSchema);
\ No newline at end of file
diff --git a/src/routes/model.spec.ts b/src/routes/model.spec.ts
index 382efd4..aefe226 100644
--- a/src/routes/model.spec.ts
+++ b/src/routes/model.spec.ts
@@ -1,6 +1,8 @@
import should from 'should/as-function';
-import ModelModel from '../models/model';
+import ModelFileModel from '../models/model_file';
import TestHelper from "../test/helper";
+import ModelModel from '../models/model';
+import _ from 'lodash';
describe('/model', () => {
@@ -10,11 +12,269 @@ describe('/model', () => {
afterEach(done => TestHelper.afterEach(server, done));
after(done => TestHelper.after(done));
- describe('GET /model/{name}', (() => {
+ describe('GET /model/groups', () => {
+ it('returns all groups', done => {
+ TestHelper.request(server, done, {
+ method: 'get',
+ url: '/model/groups',
+ auth: {basic: 'janedoe'},
+ httpStatus: 200,
+ }).end((err, res) => {
+ if (err) return done (err);
+ const json = require('../test/db.json');
+ should(res.body).have.lengthOf(json.collections.models.length);
+ should(res.body).matchEach(group => {
+ should(group).have.only.keys('group', 'models');
+ should(group).have.property('group').be.type('string');
+ should(group.models).matchEach(model => {
+ should(model).have.only.keys('name', 'url', 'label');
+ should(model).have.property('name').be.type('string');
+ should(model).have.property('url').be.type('string');
+ should(model).have.property('label').be.type('string');
+ });
+ });
+ done();
+ });
+ });
+ it('rejects an API key', done => {
+ TestHelper.request(server, done, {
+ method: 'get',
+ url: '/model/groups',
+ auth: {key: 'janedoe'},
+ httpStatus: 401,
+ });
+ });
+ it('rejects an unauthorized request', done => {
+ TestHelper.request(server, done, {
+ method: 'get',
+ url: '/model/groups',
+ httpStatus: 401,
+ });
+ });
+ });
+
+ describe('POST /model/{group}', () => {
+ it('adds a new model', done => {
+ TestHelper.request(server, done, {
+ method: 'post',
+ url: '/model/VN',
+ auth: {basic: 'admin'},
+ httpStatus: 200,
+ req: {name: 'Model C', url: 'http://model-c.com', label: 'ml/g'}
+ }).end((err, res) => {
+ if (err) return done(err);
+ should(res.body).be.eql({status: 'OK'});
+ ModelModel.findOne({group: 'VN'}).lean().exec((err, res) => {
+ if (err) return done(err);
+ const model = res.models.find(e => e.name === 'Model C');
+ should(model).have.property('url', 'http://model-c.com');
+ should(model).have.property('label', 'ml/g');
+ done();
+ });
+ });
+ });
+ it('adds a new group', done => {
+ TestHelper.request(server, done, {
+ method: 'post',
+ url: '/model/classification',
+ auth: {basic: 'admin'},
+ httpStatus: 200,
+ req: {name: 'Model 0.1', url: 'http://model-0-1.com', label: 'group'}
+ }).end((err, res) => {
+ if (err) return done(err);
+ should(res.body).be.eql({status: 'OK'});
+ ModelModel.findOne({group: 'classification'}).lean().exec((err, res) => {
+ if (err) return done(err);
+ should(_.omit(res, ['_id', '__v'])).be.eql({group: 'classification', models: [{name: 'Model 0.1', url: 'http://model-0-1.com', label: 'group'}]});
+ done();
+ });
+ });
+ });
+ it('replaces a model', done => {
+ TestHelper.request(server, done, {
+ method: 'post',
+ url: '/model/VN',
+ auth: {basic: 'admin'},
+ httpStatus: 200,
+ req: {name: 'Model A', url: 'http://model-a-new.com', label: 'ml/cm3'}
+ }).end((err, res) => {
+ if (err) return done(err);
+ should(res.body).be.eql({status: 'OK'});
+ ModelModel.findOne({group: 'VN'}).lean().exec((err, res) => {
+ if (err) return done(err);
+ const model = res.models.find(e => e.name === 'Model A');
+ should(model).have.property('url', 'http://model-a-new.com');
+ should(model).have.property('label', 'ml/cm3');
+ done();
+ });
+ });
+ });
+ it('accepts an empty label', done => {
+ TestHelper.request(server, done, {
+ method: 'post',
+ url: '/model/VN',
+ auth: {basic: 'admin'},
+ httpStatus: 200,
+ req: {name: 'Model C', url: 'http://model-c.com', label: ''}
+ }).end((err, res) => {
+ if (err) return done(err);
+ should(res.body).be.eql({status: 'OK'});
+ ModelModel.findOne({group: 'VN'}).lean().exec((err, res) => {
+ if (err) return done(err);
+ const model = res.models.find(e => e.name === 'Model C');
+ should(model).have.property('url', 'http://model-c.com');
+ should(model).have.property('label', '');
+ done();
+ });
+ });
+ });
+ it('rejects an empty name', done => {
+ TestHelper.request(server, done, {
+ method: 'post',
+ url: '/model/VN',
+ auth: {basic: 'admin'},
+ httpStatus: 400,
+ req: {name: '', url: 'http://model-c.com', label: 'ml/g'},
+ res:{status: 'Invalid body format', details: '"name" is not allowed to be empty'}
+ });
+ });
+ it('rejects a missing name', done => {
+ TestHelper.request(server, done, {
+ method: 'post',
+ url: '/model/VN',
+ auth: {basic: 'admin'},
+ httpStatus: 400,
+ req: {url: 'http://model-c.com', label: 'ml/g'},
+ res:{status: 'Invalid body format', details: '"name" is required'}
+ });
+ });
+ it('rejects an invalid URL', done => {
+ TestHelper.request(server, done, {
+ method: 'post',
+ url: '/model/VN',
+ auth: {basic: 'admin'},
+ httpStatus: 400,
+ req: {name: 'Model C', url: 'model-c', label: 'ml/g'},
+ res:{status: 'Invalid body format', details: '"url" must be a valid uri'}
+ });
+ });
+ it('rejects a missing URL', done => {
+ TestHelper.request(server, done, {
+ method: 'post',
+ url: '/model/VN',
+ auth: {basic: 'admin'},
+ httpStatus: 400,
+ req: {name: 'Model C', label: 'ml/g'},
+ res:{status: 'Invalid body format', details: '"url" is required'}
+ });
+ });
+ it('rejects a write user', done => {
+ TestHelper.request(server, done, {
+ method: 'post',
+ url: '/model/VN',
+ auth: {basic: 'janedoe'},
+ httpStatus: 403,
+ req: {name: 'Model C', url: 'http://model-c.com', label: 'ml/g'}
+ });
+ });
+ it('rejects an API key', done => {
+ TestHelper.request(server, done, {
+ method: 'post',
+ url: '/model/VN',
+ auth: {key: 'admin'},
+ httpStatus: 401,
+ req: {name: 'Model C', url: 'http://model-c.com', label: 'ml/g'}
+ });
+ });
+ it('rejects an unauthorized request', done => {
+ TestHelper.request(server, done, {
+ method: 'post',
+ url: '/model/VN',
+ httpStatus: 401,
+ req: {name: 'Model C', url: 'http://model-c.com', label: 'ml/g'}
+ });
+ });
+ });
+
+ describe('DELETE /model/{group}/{name}', () => {
+ it('deletes the model', done => {
+ TestHelper.request(server, done, {
+ method: 'delete',
+ url: '/model/VN/Model%20A',
+ auth: {basic: 'admin'},
+ httpStatus: 200
+ }).end((err, res) => {
+ if (err) return done(err);
+ should(res.body).be.eql({status: 'OK'});
+ ModelModel.findOne({group: 'VN'}).lean().exec((err, res) => {
+ if (err) return done(err);
+ should(_.omit(res, ['_id'])).be.eql({group: 'VN', models: [{name: 'Model B', url: 'http://model-b.com', label: 'ml/g'}]});
+ done();
+ });
+ });
+ });
+ it('deletes the group, if empty afterwards', done => {
+ TestHelper.request(server, done, {
+ method: 'delete',
+ url: '/model/Moisture/Model%201',
+ auth: {basic: 'admin'},
+ httpStatus: 200
+ }).end((err, res) => {
+ if (err) return done(err);
+ should(res.body).be.eql({status: 'OK'});
+ ModelModel.find({group: 'Moisture'}).lean().exec((err, res) => {
+ if (err) return done(err);
+ should(res).have.lengthOf(0);
+ done();
+ });
+ });
+ });
+ it('returns 404 for an unknown group', done => {
+ TestHelper.request(server, done, {
+ method: 'delete',
+ url: '/model/xxx/Model%201',
+ auth: {basic: 'admin'},
+ httpStatus: 404
+ });
+ });
+ it('returns 404 for an unknown model', done => {
+ TestHelper.request(server, done, {
+ method: 'delete',
+ url: '/model/VN/xxx',
+ auth: {basic: 'admin'},
+ httpStatus: 404
+ });
+ });
+ it('rejects an API key', done => {
+ TestHelper.request(server, done, {
+ method: 'delete',
+ url: '/model/VN/Model%20A',
+ auth: {key: 'admin'},
+ httpStatus: 401
+ });
+ });
+ it('rejects a write user', done => {
+ TestHelper.request(server, done, {
+ method: 'delete',
+ url: '/model/VN/Model%20A',
+ auth: {basic: 'janedoe'},
+ httpStatus: 403
+ });
+ });
+ it('rejects an unauthorized request', done => {
+ TestHelper.request(server, done, {
+ method: 'delete',
+ url: '/model/VN/Model%20A',
+ httpStatus: 401
+ });
+ });
+ });
+
+ describe('GET /model/file/{name}', (() => {
it('returns the binary data', done => {
TestHelper.request(server, done, {
method: 'get',
- url: '/model/modela',
+ url: '/model/file/modela',
auth: {basic: 'admin'},
httpStatus: 200,
contentType: 'application/octet-stream; charset=utf-8',
@@ -27,7 +287,7 @@ describe('/model', () => {
it('returns the binary data for an API key', done => {
TestHelper.request(server, done, {
method: 'get',
- url: '/model/modela',
+ url: '/model/file/modela',
auth: {key: 'admin'},
httpStatus: 200,
contentType: 'application/octet-stream; charset=utf-8',
@@ -40,7 +300,7 @@ describe('/model', () => {
it('returns 404 for an unknown name', done => {
TestHelper.request(server, done, {
method: 'get',
- url: '/model/modelx',
+ url: '/model/file/modelx',
auth: {basic: 'admin'},
httpStatus: 404
})
@@ -48,7 +308,7 @@ describe('/model', () => {
it('rejects requests from a write user', done => {
TestHelper.request(server, done, {
method: 'get',
- url: '/model/modela',
+ url: '/model/file/modela',
auth: {basic: 'janedoe'},
httpStatus: 403
})
@@ -56,17 +316,17 @@ describe('/model', () => {
it('rejects unauthorized requests', done => {
TestHelper.request(server, done, {
method: 'get',
- url: '/model/modela',
+ url: '/model/file/modela',
httpStatus: 401
})
});
}));
- describe('POST /model/{name}', () => {
+ describe('POST /model/file/{name}', () => {
it('stores the data', done => {
TestHelper.request(server, done, {
method: 'post',
- url: '/model/modelb',
+ url: '/model/file/modelb',
auth: {basic: 'admin'},
httpStatus: 200,
reqContentType: 'application/octet-stream',
@@ -74,7 +334,7 @@ describe('/model', () => {
}).end((err, res) => {
if (err) return done (err);
should(res.body).be.eql({status: 'OK'});
- ModelModel.find({name: 'modelb'}).lean().exec((err, data) => {
+ ModelFileModel.find({name: 'modelb'}).lean().exec((err, data) => {
if (err) return done (err);
should(data).have.lengthOf(1);
should(data[0]).have.only.keys('_id', 'name', 'data', '__v');
@@ -87,7 +347,7 @@ describe('/model', () => {
it('stores the data with an API key', done => {
TestHelper.request(server, done, {
method: 'post',
- url: '/model/modelb',
+ url: '/model/file/modelb',
auth: {key: 'admin'},
httpStatus: 200,
reqContentType: 'application/octet-stream',
@@ -95,7 +355,7 @@ describe('/model', () => {
}).end((err, res) => {
if (err) return done (err);
should(res.body).be.eql({status: 'OK'});
- ModelModel.find({name: 'modelb'}).lean().exec((err, data) => {
+ ModelFileModel.find({name: 'modelb'}).lean().exec((err, data) => {
if (err) return done (err);
should(data).have.lengthOf(1);
should(data[0]).have.only.keys('_id', 'name', 'data', '__v');
@@ -108,7 +368,7 @@ describe('/model', () => {
it('overwrites existing data', done => {
TestHelper.request(server, done, {
method: 'post',
- url: '/model/modela',
+ url: '/model/file/modela',
auth: {basic: 'admin'},
httpStatus: 200,
reqContentType: 'application/octet-stream',
@@ -116,7 +376,7 @@ describe('/model', () => {
}).end((err, res) => {
if (err) return done (err);
should(res.body).be.eql({status: 'OK'});
- ModelModel.find({name: 'modela'}).lean().exec((err, data) => {
+ ModelFileModel.find({name: 'modela'}).lean().exec((err, data) => {
if (err) return done (err);
should(data).have.lengthOf(1);
should(data[0]).have.only.keys('_id', 'name', 'data', '__v');
@@ -129,7 +389,7 @@ describe('/model', () => {
it('rejects requests from a write user', done => {
TestHelper.request(server, done, {
method: 'post',
- url: '/model/modelb',
+ url: '/model/file/modelb',
auth: {basic: 'janedoe'},
httpStatus: 403,
req: 'another binary data'
@@ -138,24 +398,24 @@ describe('/model', () => {
it('rejects unauthorized requests', done => {
TestHelper.request(server, done, {
method: 'post',
- url: '/model/modelb',
+ url: '/model/file/modelb',
httpStatus: 401,
req: 'another binary data'
});
});
});
- describe('DELETE /model/{name}', () => {
+ describe('DELETE /model/file/{name}', () => {
it('deletes the data', done => {
TestHelper.request(server, done, {
method: 'delete',
- url: '/model/modela',
+ url: '/model/file/modela',
auth: {basic: 'admin'},
httpStatus: 200
}).end((err, res) => {
if (err) return done(err);
should(res.body).be.eql({status: 'OK'});
- ModelModel.find({name: 'modela'}).lean().exec((err, data) => {
+ ModelFileModel.find({name: 'modela'}).lean().exec((err, data) => {
if (err) return done(err);
should(data).have.lengthOf(0);
done();
@@ -165,7 +425,7 @@ describe('/model', () => {
it('returns 404 for an unknown name', done => {
TestHelper.request(server, done, {
method: 'delete',
- url: '/model/modelx',
+ url: '/model/file/modelx',
auth: {basic: 'admin'},
httpStatus: 404
});
@@ -173,7 +433,7 @@ describe('/model', () => {
it('rejects an API key', done => {
TestHelper.request(server, done, {
method: 'delete',
- url: '/model/modela',
+ url: '/model/file/modela',
auth: {key: 'admin'},
httpStatus: 401
});
@@ -181,7 +441,7 @@ describe('/model', () => {
it('rejects a write user', done => {
TestHelper.request(server, done, {
method: 'delete',
- url: '/model/modela',
+ url: '/model/file/modela',
auth: {basic: 'janedoe'},
httpStatus: 403
});
@@ -189,7 +449,7 @@ describe('/model', () => {
it('rejects an unauthorized request', done => {
TestHelper.request(server, done, {
method: 'delete',
- url: '/model/modela',
+ url: '/model/file/modela',
httpStatus: 401
});
});
diff --git a/src/routes/model.ts b/src/routes/model.ts
index 882de58..634f637 100644
--- a/src/routes/model.ts
+++ b/src/routes/model.ts
@@ -1,14 +1,97 @@
import express from 'express';
import bodyParser from 'body-parser';
+import ModelFileModel from '../models/model_file';
import ModelModel from '../models/model';
+import _ from 'lodash';
+import ModelValidate from './validate/model';
+import res400 from './validate/res400';
+import db from '../db';
const router = express.Router();
-router.get('/model/:name', (req, res, next) => {
+router.get('/model/groups', (req, res, next) => {
+ if (!req.auth(res, ['read', 'write', 'dev', 'admin'], 'basic')) return;
+
+ ModelModel.find().lean().exec((err, data) => {
+ if (err) return next(err);
+
+ // validate all and filter null values from validation errors
+ res.json(_.compact(data.map(e => ModelValidate.output(e))));
+ });
+});
+
+router.post('/model/:group', (req, res, next) => {
+ if (!req.auth(res, ['dev', 'admin'], 'basic')) return;
+
+ const {error, value: model} = ModelValidate.input(req.body);
+
+ if (error) return res400(error, res);
+
+ ModelModel.findOne({group: req.params.group}).lean().exec((err, data) => {
+ if (err) return next(err);
+
+ if (data) { // group exists
+ if (data.models.find(e => e.name === model.name)) { // name exists, overwrite
+ ModelModel.findOneAndUpdate(
+ {$and: [{group: req.params.group}, {'models.name': model.name}]},
+ {'models.$': model}
+ ).log(req).lean().exec(err => {
+ if (err) return next(err);
+ res.json({status: 'OK'})
+ });
+ }
+ else { // create new
+ ModelModel.findOneAndUpdate(
+ {group: req.params.group},
+ {$push: {models: model as never}}
+ ).log(req).lean().exec(err => {
+ if (err) return next(err);
+ res.json({status: 'OK'})
+ });
+ }
+ }
+ else { // create new group
+ new ModelModel({group: req.params.group, models: [model]}).save((err, data) => {
+ if (err) return next(err);
+ db.log(req, 'models', {_id: data._id}, data.toObject());
+ res.json({status: 'OK'})
+ });
+ }
+ });
+});
+
+router.delete('/model/:group(((?!file)[^\\/]+?))/:name', (req, res, next) => {
+ if (!req.auth(res, ['dev', 'admin'], 'basic')) return;
+
+ ModelModel.findOne({group: req.params.group}).lean().exec((err, data) => {
+ if (err) return next(err);
+
+ if (!data || !data.models.find(e => e.name === req.params.name)) {
+ return res.status(404).json({status: 'Not found'});
+ }
+ if (data.models.length > 1) { // only remove model
+ ModelModel.findOneAndUpdate(
+ {group: req.params.group},
+ {$pull: {models: data.models.find(e => e.name === req.params.name) as never}}
+ ).log(req).lean().exec(err => {
+ if (err) return next(err);
+ res.json({status: 'OK'})
+ });
+ }
+ else { // remove document
+ ModelModel.findOneAndDelete({group: req.params.group}).log(req).lean().exec(err => {
+ if (err) return next(err);
+ res.json({status: 'OK'})
+ });
+ }
+ });
+});
+
+router.get('/model/file/:name', (req, res, next) => {
if (!req.auth(res, ['dev', 'admin'], 'all')) return;
- ModelModel.findOne({name: req.params.name}).lean().exec((err, data) => {
+ ModelFileModel.findOne({name: req.params.name}).lean().exec((err, data) => {
if (err) return next(err);
if (data) {
res.set('Content-Type', 'application/octet-stream');
@@ -20,20 +103,20 @@ router.get('/model/:name', (req, res, next) => {
});
});
-router.post('/model/:name', bodyParser.raw({limit: '500kb'}), (req, res, next) => {
+router.post('/model/file/:name', bodyParser.raw({limit: '5mb'}), (req, res, next) => {
if (!req.auth(res, ['dev', 'admin'], 'all')) return;
- ModelModel.replaceOne({name: req.params.name}, {name: req.params.name, data: req.body}).setOptions({upsert: true})
+ ModelFileModel.replaceOne({name: req.params.name}, {name: req.params.name, data: req.body}).setOptions({upsert: true})
.lean().exec(err => {
if (err) return next(err);
res.json({status: 'OK'});
});
});
-router.delete('/model/:name', (req, res, next) => {
+router.delete('/model/file/:name', (req, res, next) => {
if (!req.auth(res, ['dev', 'admin'], 'basic')) return;
- ModelModel.findOneAndDelete({name: req.params.name}).lean().exec((err, data) => {
+ ModelFileModel.findOneAndDelete({name: req.params.name}).lean().exec((err, data) => {
if (err) return next(err);
if (data) {
res.json({status: 'OK'});
diff --git a/src/routes/sample.ts b/src/routes/sample.ts
index 7d97c64..e7a0b03 100644
--- a/src/routes/sample.ts
+++ b/src/routes/sample.ts
@@ -22,6 +22,8 @@ import globals from '../globals';
const router = express.Router();
+// TODO: do not use streaming for spectrum filenames
+
router.get('/samples', async (req, res, next) => {
if (!req.auth(res, ['read', 'write', 'dev', 'admin'], 'all')) return;
@@ -260,7 +262,9 @@ router.get('/samples', async (req, res, next) => {
// count total number of items before $skip and $limit, only works when from-id is not specified and spectra are not
// included
- if (!filters.fields.find(e => e.indexOf(globals.spectrum.spectrum + '.') >= 0) && !filters['from-id']) {
+ if (!filters.fields.find(e =>
+ e.indexOf(globals.spectrum.spectrum + '.' + globals.spectrum.dpt) >= 0) && !filters['from-id']
+ ) {
queryPtr.push({$facet: {count: [{$count: 'count'}], samples: []}});
queryPtr = queryPtr[queryPtr.length - 1].$facet.samples; // add rest of aggregation pipeline into $facet
}
@@ -364,7 +368,7 @@ router.get('/samples', async (req, res, next) => {
}
queryPtr.push({$project: projection});
// use streaming when including spectrum files
- if (!fieldsToAdd.find(e => e.indexOf(globals.spectrum.spectrum + '.') >= 0)) {
+ if (!fieldsToAdd.find(e => e.indexOf(globals.spectrum.spectrum + '.' + globals.spectrum.dpt) >= 0)) {
collection.aggregate(query).allowDiskUse(true).exec((err, data) => {
if (err) return next(err);
if (data[0] && data[0].count) {
diff --git a/src/routes/validate/model.ts b/src/routes/validate/model.ts
new file mode 100644
index 0000000..30d3179
--- /dev/null
+++ b/src/routes/validate/model.ts
@@ -0,0 +1,38 @@
+import Joi from 'joi';
+
+
+export default class ModelValidate { // validate input for model
+ private static model = {
+ group: Joi.string()
+ .disallow('file')
+ .max(128),
+
+ model: Joi.object({
+ name: Joi.string()
+ .max(128)
+ .required(),
+
+ url: Joi.string()
+ .uri()
+ .max(512)
+ .required(),
+
+ label: Joi.string()
+ .allow('')
+ .max(128)
+ .required()
+ })
+ };
+
+ static input (data) { // validate input
+ return this.model.model.required().validate(data);
+ }
+
+ static output (data) { // validate output and strip unwanted properties, returns null if not valid
+ const {value, error} = Joi.object({
+ group: this.model.group,
+ models: Joi.array().items(this.model.model)
+ }).validate(data, {stripUnknown: true});
+ return error !== undefined ? null : value;
+ }
+}
\ No newline at end of file
diff --git a/src/test/db.json b/src/test/db.json
index 468c43d..7727315 100644
--- a/src/test/db.json
+++ b/src/test/db.json
@@ -678,13 +678,40 @@
"__v": 0
}
],
- "models": [
+ "model_files": [
{
"_id": {"$oid":"140000000000000000000001"},
"name": "modela",
"data": {"buffer": "binary data"}
}
],
+ "models": [
+ {
+ "group": "VN",
+ "models": [
+ {
+ "name": "Model A",
+ "url": "http://model-a.com",
+ "label": "ml/g"
+ },
+ {
+ "name": "Model B",
+ "url": "http://model-b.com",
+ "label": "ml/g"
+ }
+ ]
+ },
+ {
+ "group": "Moisture",
+ "models": [
+ {
+ "name": "Model 1",
+ "url": "http://model-1.com",
+ "label": "weight %"
+ }
+ ]
+ }
+ ],
"users": [
{
"_id": {"$oid":"000000000000000000000001"},