Commit 084bbb62 authored by Tobinsk

Merge branch '2-use-objects' into 'master'

Resolve "use objects"

Closes #2

See merge request !4
parents 918e8c44 4656d689
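
The diffs below move the three Wikidata transformers away from the old `chunk.parsed.data` wrapper: `_transform` now accepts either a raw Buffer (parsed as UTF-8 JSON, with the chunk skipped on a parse error) or an already-parsed object that is used as-is. A minimal sketch of that shared pattern, pulled out into a standalone helper purely for illustration (the `parseChunk` name does not exist in this merge request; each transformer inlines the logic):

    // Sketch only: normalize an incoming chunk to a plain object.
    // Returns undefined for a Buffer that is not valid JSON, which the
    // transformers translate into `return callback(null)` (chunk dropped).
    function parseChunk(chunk: Buffer | object): object | undefined {
      if (Buffer.isBuffer(chunk)) {
        try {
          return JSON.parse(chunk.toString('utf8'));
        } catch (err) {
          return undefined;
        }
      }
      return chunk; // already an object, e.g. written in objectMode
    }
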
@@ -102,8 +102,17 @@ class WikidataAnalyserTransformer extends Transform {
    */
   public _transform(chunk, encoding, callback) {
     // if norm_identifiers are unset, go ahead
-    const data = Buffer.from(chunk.parsed.data).toString('utf8');
-    const obj = JSON.parse(data);
+    let obj;
+    if(Buffer.isBuffer(chunk)) {
+      try {
+        const data = Buffer.from(chunk).toString('utf8');
+        obj = JSON.parse(data);
+      } catch(err) {
+        return callback(null);
+      }
+    } else {
+      obj = chunk;
+    }
     if (typeof obj.claims === 'undefined') {
       return callback(null);

@@ -23,13 +23,17 @@ class WikidataGeofilterTransformer extends Transform {
    */
   public _transform(chunk, encoding, callback) {
     // check if instance of exists
     let obj;
-    try {
-      const data = Buffer.from(chunk.parsed.data).toString('utf8');
-      obj = JSON.parse(data);
-    } catch (err) {
-      return callback(null);
-    }
+    if(Buffer.isBuffer(chunk)) {
+      try {
+        const data = Buffer.from(chunk).toString('utf8');
+        obj = JSON.parse(data);
+      } catch(err) {
+        return callback(null);
+      }
+    } else {
+      obj = chunk;
+    }
     if (typeof obj.claims === 'undefined' || typeof obj.claims.P31 === 'undefined') {
       return callback(null);

@@ -40,9 +40,19 @@ class WikidataNormalizerTransformer extends Transform {
    */
   public _transform(chunk, encoding, callback) {
     let result = {};
+    let obj;
+    if(Buffer.isBuffer(chunk)) {
+      try {
+        const data = Buffer.from(chunk).toString('utf8');
+        obj = JSON.parse(data);
+      } catch(err) {
+        return callback(null);
+      }
+    } else {
+      obj = chunk;
+    }
     try {
-      const data = Buffer.from(chunk.parsed.data).toString('utf8');
-      const obj = JSON.parse(data);
       // helpers
       const lat = Number.parseFloat(this.getValue(obj, 'P625', 'latitude')).toFixed(6);
       const lon = Number.parseFloat(this.getValue(obj, 'P625', 'longitude')).toFixed(6);

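For context on the normalizer hunk: `getValue` is not part of this diff, so the following is only a guess at what it reads, based on the standard Wikidata entity JSON layout (`claims.P625[0].mainsnak.datavalue.value.latitude`); the real helper in this repository may differ.

    // Sketch only: a getValue(entity, 'P625', 'latitude')-style accessor over
    // the usual Wikidata claim structure. An assumption, not code from this repo.
    function getValue(entity: any, property: string, field: string): any {
      const claims = entity.claims && entity.claims[property];
      const snak = claims && claims[0] && claims[0].mainsnak;
      const value = snak && snak.datavalue ? snak.datavalue.value : undefined;
      return value ? value[field] : undefined;
    }

    // Used as in the hunk above:
    // const lat = Number.parseFloat(getValue(obj, 'P625', 'latitude')).toFixed(6);
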
@@ -41,11 +41,59 @@ describe('Test wikidata-analyser-transformer', () => {
     });
     // build chunk
-    const chunck = {
-      parsed: {
-        data: Buffer.from(fs.readFileSync(__dirname + '/fixtures/place.json')),
-      },
-    };
+    const chunck = Buffer.from(fs.readFileSync(__dirname + '/fixtures/place.json'))
+    analyser.write(chunck);
+  });
+
+  it('Try to transform json-string data', (done) => {
+    const options = {
+      objectMode: true,
+      nconf,
+      uriBuilder: coreMock.getUriBuilder(),
+    };
+    const analyser = new WikidataAnalyserTransformer(options);
+    // listener to check output
+    analyser.on('data', (data) => {
+      const testMessage = {
+        from: 'https://www.wikidata.org/wiki/Q13038',
+        to:
+          ['https://g.co/kg/m/0gdcn4', 'http://geonames.org/8261346'],
+        relation: {type: 'same_as', author: undefined, weight: 1},
+      };
+      data.should.be.deep.equal(testMessage);
+      done();
+    });
+    // build chunk
+    const chunck = fs.readFileSync(__dirname + '/fixtures/place.json');
     analyser.write(chunck);
   });
+
+  it('Try to transform json-object data', (done) => {
+    const options = {
+      objectMode: true,
+      nconf,
+      uriBuilder: coreMock.getUriBuilder(),
+    };
+    const analyser = new WikidataAnalyserTransformer(options);
+    // listener to check output
+    analyser.on('data', (data) => {
+      const testMessage = {
+        from: 'https://www.wikidata.org/wiki/Q13038',
+        to:
+          ['https://g.co/kg/m/0gdcn4', 'http://geonames.org/8261346'],
+        relation: {type: 'same_as', author: undefined, weight: 1},
+      };
+      data.should.be.deep.equal(testMessage);
+      done();
+    });
+    // build chunk
+    const chunck: string = fs.readFileSync(__dirname + '/fixtures/place.json', 'utf8');
+    analyser.write(JSON.parse(chunck));
+  });
 });

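Not part of this merge request, but it may help to see the new catch branch exercised: with a Buffer that is not valid JSON, `_transform` returns `callback(null)` and no 'data' event should fire. A sketch in the style of the tests above, reusing their `nconf` and `coreMock` fixtures (an assumption):

    // Sketch only (not in this MR): malformed Buffer input is silently skipped.
    it('Skips a buffer chunk that is not valid JSON', (done) => {
      const analyser = new WikidataAnalyserTransformer({
        objectMode: true,
        nconf,
        uriBuilder: coreMock.getUriBuilder(),
      });
      analyser.on('data', () => done(new Error('no output expected for malformed input')));
      analyser.on('end', () => done());
      analyser.write(Buffer.from('not valid json'));
      analyser.end();
    });
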
@@ -15,12 +15,7 @@ describe('Test wikidata-geofilter-transformer', () => {
   it('Try to successfully identify a location', (done) => {
     const mock = fs.readFileSync(__dirname + '/fixtures/place.json');
     // build chunk
-    const chunk = {
-      parsed: {
-        data: Buffer.from(mock),
-      },
-    };
+    const chunk = Buffer.from(mock);
     // Q1840161 = commune of chile a subclass of location
     const geoInstances = ['Q1840161'];
     const options = {

@@ -41,11 +36,7 @@ describe('Test wikidata-geofilter-transformer', () => {
   it('Try to filter out a non location item (person)', (done) => {
     const mock = fs.readFileSync(__dirname + '/fixtures/person.json');
     // build chunk
-    const chunk = {
-      parsed: {
-        data: Buffer.from(mock),
-      },
-    };
+    const chunk = Buffer.from(mock);
     // Q1840161 = commune of chile a subclass of location
     const geoInstances = ['Q1840161'];

@@ -48,11 +48,7 @@ describe('Test WikiNormalizer', () => {
     });
     // build chunk
-    const chunck = {
-      parsed: {
-        data: Buffer.from(fs.readFileSync(__dirname + '/fixtures/place.json')),
-      },
-    };
+    const chunck = Buffer.from(fs.readFileSync(__dirname + '/fixtures/place.json'))
     normalizer.write(chunck);
     normalizer.end();
   });
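
The practical effect of "use objects": because a transformer can now hand a plain object straight to the next stage, the streams can be chained in objectMode without re-serializing between them. A rough usage sketch; the constructor options are guesses based on the tests above, and the geofilter-to-analyser wiring is an illustration rather than code from this project:

    // Sketch only: chaining the transformers in objectMode. Constructor options
    // are guessed from the tests above; the actual pipeline wiring may differ.
    const geofilter = new WikidataGeofilterTransformer({
      objectMode: true,
      geoInstances: ['Q1840161'],
    });
    const analyser = new WikidataAnalyserTransformer({
      objectMode: true,
      nconf,
      uriBuilder: coreMock.getUriBuilder(),
    });

    geofilter.pipe(analyser).on('data', (relation) => console.log(relation));

    // Both input forms are accepted now: a raw Buffer or an already-parsed object.
    geofilter.write(fs.readFileSync(__dirname + '/fixtures/place.json'));
    geofilter.write(JSON.parse(fs.readFileSync(__dirname + '/fixtures/place.json', 'utf8')));
    geofilter.end();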