Commit 78c89b1a authored by Tobias Steiner

Update geolinker-common deps and let the consumer start consuming data

parent 09b2e70a
......@@ -114,9 +114,9 @@
"integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k="
},
"avsc": {
"version": "5.2.3",
"resolved": "https://registry.npmjs.org/avsc/-/avsc-5.2.3.tgz",
"integrity": "sha512-1qoCOb6Q5q/QmMxmQu5OvKZnv3IELvoz3+rLAhfdslb3JnmtzW3hhLdLJNvpENqCgHzz9tCsdyHMk27m/7wREQ=="
"version": "5.3.0",
"resolved": "https://registry.npmjs.org/avsc/-/avsc-5.3.0.tgz",
"integrity": "sha512-P0izoN7HA+REPZin0LMtqTl/s8wSyeDK8iZttkrfrMZTqL6W9sSCX3UtzJYhsM8X9IkvAUB5k/7GJbAE14KNMA=="
},
"aws-sign2": {
"version": "0.7.0",
......@@ -405,6 +405,11 @@
"resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz",
"integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac="
},
"cpu-stat": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/cpu-stat/-/cpu-stat-2.0.1.tgz",
"integrity": "sha1-UB6NbdLUTO9dhCk5w40YIsB4/Kw="
},
"cycle": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/cycle/-/cycle-1.0.3.tgz",
......@@ -541,7 +546,7 @@
"dev": true
},
"geolinker-common": {
"version": "git+https://gitlab+deploy-token-1:vnsdCm_t84QGVA2U4kw4@source.dodis.ch/histhub/geolinker-common.git#1eced76f8df782989968bb80e2c0ad55b36752ba",
"version": "git+https://gitlab+deploy-token-1:vnsdCm_t84QGVA2U4kw4@source.dodis.ch/histhub/geolinker-common.git#256f172cbf0b4eb50ec9a18aa8c1e7779b049779",
"from": "git+https://gitlab+deploy-token-1:vnsdCm_t84QGVA2U4kw4@source.dodis.ch/histhub/geolinker-common.git",
"requires": {
"@types/es6-promise": "^3.3.0",
......@@ -550,6 +555,7 @@
"@types/winston": "^2.3.9",
"axios": "^0.18.0",
"concurrently": "^3.5.1",
"cpu-stat": "^2.0.1",
"js-yaml": "^3.11.0",
"kafka-avro": "^1.0.2",
"nconf": "^0.10.0",
......@@ -713,9 +719,9 @@
}
},
"kafka-avro": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/kafka-avro/-/kafka-avro-1.0.3.tgz",
"integrity": "sha512-5PzPTFxqr2zB+CHv8PcU8ps/3Csf3aen+JklOyt0eoPJb7dq+uH/bdIjwY9MZj/tAkU+Qdedg4ZuTJ/EmyFszg==",
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/kafka-avro/-/kafka-avro-1.0.4.tgz",
"integrity": "sha512-5fJrf8GP/Qo+7iysZZtvolv9JkzkhX1DDnfAId4jaZirqui52zWc/mYaRsk3PWvVerKCuTw1YnB90PGTOuc/4g==",
"requires": {
"avsc": "^5.2.3",
"axios": "^0.15.3",
......@@ -802,9 +808,9 @@
}
},
"moment": {
"version": "2.22.1",
"resolved": "https://registry.npmjs.org/moment/-/moment-2.22.1.tgz",
"integrity": "sha512-shJkRTSebXvsVqk56I+lkb2latjBs8I+pc2TzWc545y2iFnSjm7Wg0QMh+ZWcdSLQyGEau5jI8ocnmkyTgr9YQ==",
"version": "2.22.2",
"resolved": "https://registry.npmjs.org/moment/-/moment-2.22.2.tgz",
"integrity": "sha1-PCV/mDn8DpP/UxSWMiOeuQeD/2Y=",
"optional": true
},
"ms": {
......@@ -1247,9 +1253,9 @@
"integrity": "sha1-+OGqHuWlPsW/FR/6CXQqatdpeHY="
},
"winston": {
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/winston/-/winston-2.4.2.tgz",
"integrity": "sha512-4S/Ad4ZfSNl8OccCLxnJmNISWcm2joa6Q0YGDxlxMzH0fgSwWsjMt+SmlNwCqdpaPg3ev1HKkMBsIiXeSUwpbA==",
"version": "2.4.3",
"resolved": "https://registry.npmjs.org/winston/-/winston-2.4.3.tgz",
"integrity": "sha512-GYKuysPz2pxYAVJD2NPsDLP5Z79SDEzPm9/j4tCjkF/n89iBNGBMJcR+dMUqxgPNgoSs6fVygPi+Vl2oxIpBuw==",
"requires": {
"async": "~1.0.0",
"colors": "1.0.x",
......
......@@ -7,12 +7,11 @@
"@types/node": "^9.6.19",
"@types/winston": "^2.3.9",
"concurrently": "^3.5.1",
"geolinker-common": "git+https://gitlab+deploy-token-1:vnsdCm_t84QGVA2U4kw4@source.dodis.ch/histhub/geolinker-common.git",
"nconf": "^0.10.0",
"neo4j-driver": "^1.6.1",
"request": "^2.87.0",
"typescript": "^2.8.4",
"geolinker-common": "git+https://gitlab+deploy-token-1:vnsdCm_t84QGVA2U4kw4@source.dodis.ch/histhub/geolinker-common.git"
"typescript": "^2.8.4"
},
"devDependencies": {
"source-map-support": "^0.5.6",
......
......@@ -16,7 +16,14 @@ const linker = new Linker(core.getNconf(), core.getNconf(), core.getUriBuilder()
*/
const kafka = core.getKafkaAvro();
kafka.init().then(() => {
/**
* Get the consumer subscribed to the topic linker
*/
kafka.getConsumer().then(async (consumer) => {
/**
* Wait for connection
*/
await consumer.connect();
/**
......@@ -30,6 +37,7 @@ kafka.init().then(() => {
* On data, write stuff to neo4j
*/
consumer.on('data', (rawData) => {
core.getReporter().setDataIn(1);
// create new node
// rawData.parsed.provider = 'geonames';
// todo: we need to profile this error
......@@ -40,8 +48,11 @@ kafka.init().then(() => {
core.getLogger().error(JSON.stringify(error));
next();
});
// start consuming
next();
});
})
});
}, (err) => {
core.getLogger().error('Error while connecting to kafka');
throw err;
......
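
For context, here is a hedged sketch of the consumer flow this commit wires up, stitched together from the hunks above. Only getKafkaAvro(), getConsumer(), consumer.connect(), the 'data' handler, and getReporter().setDataIn(1) come from the diff itself; the import paths, the Core factory call, and the linker.link() helper with its callback are assumptions for illustration, standing in for the folded lines, not the project's confirmed API.

import Core from 'geolinker-common';              // assumed import path and export shape
import Linker from './linker';                    // assumed local module

// Assumed factory call; the hunk header only shows `core` already in scope
const core = Core.init('linker');
const linker = new Linker(core.getNconf(), core.getNconf(), core.getUriBuilder());

// Initialize the kafka-avro wrapper provided by geolinker-common
const kafka = core.getKafkaAvro();
kafka.init().then(() => {
  // Get the consumer subscribed to the linker topic
  kafka.getConsumer().then(async (consumer) => {
    // Wait for the broker connection before consuming
    await consumer.connect();

    // On data, write the record to neo4j
    consumer.on('data', (rawData) => {
      // New in this commit: count one incoming message for the reporter
      core.getReporter().setDataIn(1);

      // Assumed placeholder for the folded hunk that persists the record;
      // the real code logs errors and calls next() to keep consuming.
      linker.link(rawData.parsed, (error?: Error) => {
        if (error) {
          core.getLogger().error(JSON.stringify(error));
        }
      });
    });
  });
}, (err) => {
  core.getLogger().error('Error while connecting to kafka');
  throw err;
});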