I have a use case where I need to query some data in one database and then feed that data as new input into the MarkLogic Data Hub Framework (DHF) pipeline.
I have created a working import and harmonization flow.
Now I want to run the import flow from another database so that the data is inserted into the staging database of the DHF. This is what I have so far:
'use strict';

let id = "/ClueyTest/track/cluey/a2c5c32c-6e99-47c9-8b4d-5b97897509f7.json";

let options = {
  "dhf.projectName": "ClueyTest",
  "entity": "Track",
  "flow": "ImportClueyTracks",
  "flowType": "input",
  "dataFormat": "json"
};

let rawContent = {
  "trackId": "a2c5c32c-6e99-47c9-8b4d-5b97897509f7",
  "type": "Feature",
  "geometry": {
    "type": "LineString",
    "coordinates": [
      [5.4701967, 51.8190698],
      [5.470028, 51.8193624],
      [5.470038, 51.8193624],
      [5.470048, 51.8193624],
      [5.470028, 51.8193634]
    ]
  },
  "properties": {
    "timestamps": [
      "2019-02-14T16:52:06+0100",
      "2019-02-14T16:51:07+0100",
      "2019-02-14T16:43:24+0100",
      "2019-02-14T16:43:24+0100",
      "2019-02-14T16:43:24+0100"
    ]
  },
  "tracktype": "on",
  "endTimestamp": "2019-02-14T16:51:07+0100",
  "startTimestamp": "2019-02-14T14:46:50+0100"
};

// call the main of the import flow
const clt = require('/entities/clueyTrack/input/ImportClueyTracks/main.sjs');
clt.main(id, rawContent, options);
Obviously you need a working import flow inside your Data Hub to run this code, but the question is about the general use case: how do you run an import flow not from Gradle but from inside a MarkLogic database?
All DHF code is SJS (server-side JavaScript).
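For what it is worth, the direction I am currently experimenting with is to wrap the call to the flow's main in xdmp.invokeFunction and point it at the staging database. The sketch below is only my assumption, not working code: the database name data-hub-STAGING is the DHF default and may differ in your project, and I am assuming the generated writer plugin inserts the envelope into whatever database the function is evaluated against.

'use strict';

// Sketch only: run this where the DHF modules resolve (e.g. Query Console on the
// DHF app server). 'data-hub-STAGING' is the assumed default staging database name.
const clt = require('/entities/clueyTrack/input/ImportClueyTracks/main.sjs');

const id = "/ClueyTest/track/cluey/a2c5c32c-6e99-47c9-8b4d-5b97897509f7.json";
const options = {
  "dhf.projectName": "ClueyTest",
  "entity": "Track",
  "flow": "ImportClueyTracks",
  "flowType": "input",
  "dataFormat": "json"
};
const rawContent = { /* the JSON queried from the other database, as above */ };

xdmp.invokeFunction(
  function () {
    // Assumption: the flow's writer writes to the database this function is
    // evaluated against, so pointing the evaluation at staging redirects the insert.
    return clt.main(id, rawContent, options);
  },
  {
    database: xdmp.database('data-hub-STAGING'), // assumed staging database name
    isolation: 'different-transaction',
    update: 'true'                               // the writer performs a document insert
  }
);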