/**
 * Create a new source that returns tiles from a simplestyle-supporting
 * GeoJSON object.
 *
 * @param {string} id - an overlaydata:// URI whose payload is a GeoJSON
 *   string, optionally prefixed with "2x:" to request retina tiles.
 * @param {function} callback - node-style callback invoked with (err, source).
 * @returns {undefined}
 */
function Source(id, callback) {
    var uri = url.parse(id);
    // Only the overlaydata: pseudo-protocol is understood by this source.
    if (!uri || (uri.protocol && uri.protocol !== 'overlaydata:')) {
        return callback('Only the overlaydata protocol is supported');
    }
    var data = id.replace('overlaydata://', '');
    var retina = false;
    // A leading "2x:" marks the payload as retina (2x resolution) tiles.
    if (data.indexOf('2x:') === 0) {
        retina = true;
        data = data.replace(/^2x:/, '');
    }
    // Lint before parsing; any hint message means the payload is unusable.
    if (geojsonhint.hint(data).length) {
        return callback('invalid geojson');
    }
    // mapnikify is asynchronous; the result arrives via its callback, so the
    // previous `var generated = ...` capture was dead code and is removed.
    mapnikify(JSON.parse(data), retina, function(err, xml) {
        if (err) return callback(err);
        this._xml = xml;
        callback(null, this);
    }.bind(this));
}
add: function (geojson, validateGeoJSON=true) { if (geojson.type !== 'FeatureCollection' && !geojson.geometry) { geojson = { type: 'Feature', id: geojson.id, properties: geojson.properties || {}, geometry: geojson }; } if (validateGeoJSON) { var errors = geojsonhint.hint(geojson); if (errors.length) { throw new Error(errors[0].message); } (geojson.type === 'FeatureCollection' ? geojson.features : [geojson]).forEach(feature => { if (featureTypes[feature.geometry.type] === undefined) { throw new Error(`Invalid feature type. Must be ${featureTypeStr}`); } }); } if (geojson.type === 'FeatureCollection') { return geojson.features.map(feature => this.add(feature, false)); } geojson = JSON.parse(JSON.stringify(geojson)); geojson.id = geojson.id || hat(); if (ctx.store.get(geojson.id) === undefined) { var model = featureTypes[geojson.geometry.type]; let internalFeature = new model(ctx, geojson); ctx.store.add(internalFeature); } else { let internalFeature = ctx.store.get(geojson.id); internalFeature.properties = geojson.properties; } ctx.store.render(); return geojson.id; },
// Assert that GeoJSON (given as a string or an object) passes the linter.
// With a truthy second argument, `str` is treated as a map whose values are
// each asserted individually via recursion.
function assert(str, obj) {
    if (obj) {
        if (typeof str === 'string') {
            str = JSON.parse(str);
        }
        for (var key in str) {
            assert(str[key]);
        }
        return;
    }

    // The linter expects a string; serialize anything else.
    if (typeof str !== 'string') {
        str = JSON.stringify(str);
    }

    var issues = hint(str);
    // A non-array response or any reported issue counts as failure.
    if (!Array.isArray(issues) || issues.length) {
        throw new GeoJSONError(issues);
    }
}
/** * request nearby places * @param {object} location [geoJSON object of where to search] * @param {float} radius [radius to search by] * @param {string} rankBy [how to rank search results] * @return {function} [superagent promise] */ nearbySearch(location, radius, rankBy, keyword) { const fn = '[GoogleApi.nearbySearch]' sails.log.info(fn, location, radius, rankBy, keyword) // validate location const isValid = new Validate(location) const geoJsonErr = geojsonhint.hint(location) // reverse geoJson coordinates from [lng, lat] to [lat, lng] if (is.empty(geoJsonErr)) { location = location.coordinates.reverse() } // if no location, throw error if (!location) { let err = 'no location passed' sails.log.error(err) throw new Error(err) } // construct google nearbySearch query const query = { key: this.apiKey, location: location.toString(), radius: radius || defaults.radius, // set to defaults if no radius // rankBy: rankBy || 'distance', keyword: keyword || '' } sails.log.info('searching nearby at:', this.nearbySearchUrl, 'with query', query) return request.get(this.radarSearchUrl) .query(query) // if invalid request from google, throw error .then(results => { if (results.body.status != 'OK') { throw new Promise.OperationalError(fn, 'error:', results.body.status) } else { return results } // if }) // .then } // nearbySearch
api.add = function (geojson) {
  // Normalize to a FeatureCollection, then lint it; the first lint message
  // becomes the thrown error.
  var featureCollection = normalize(geojson);
  var lintErrors = geojsonhint.hint(featureCollection);
  if (lintErrors.length) {
    throw new Error(lintErrors[0].message);
  }

  // Deep-copy so the caller's object is never mutated by the store.
  featureCollection = JSON.parse(JSON.stringify(featureCollection));

  var ids = featureCollection.features.map(feature => {
    feature.id = feature.id || hat();

    if (feature.geometry === null) {
      throw new Error('Invalid geometry: null');
    }

    var existing = ctx.store.get(feature.id);
    if (existing === undefined || existing.type !== feature.geometry.type) {
      // If the feature has not yet been created ...
      var Model = featureTypes[feature.geometry.type];
      if (Model === undefined) {
        throw new Error(`Invalid geometry type: ${feature.geometry.type}.`);
      }
      ctx.store.add(new Model(ctx, feature));
    } else {
      // If a feature of that id has already been created, and we are swapping it out ...
      existing.properties = feature.properties;
      // Only push coordinates when they actually changed.
      if (!isEqual(existing.getCoordinates(), feature.geometry.coordinates)) {
        existing.incomingCoords(feature.geometry.coordinates);
      }
    }
    return feature.id;
  });

  ctx.store.render();
  return ids;
};
stream.on('end', function () {
  // Zero hint messages means the accumulated stream output is valid GeoJSON.
  t.ok(geojsonhint.hint(data).length === 0, 'GeoJSON valid');
});
}, function(err, results) { t.ifError(err); t.deepEqual(geojsonhint.hint(results.results), [], 'results are valid'); t.ok(results, 'results are valid'); t.end(); });
/**
 * Build a SQL import script from a GeoJSON document.
 *
 * Lints the GeoJSON, normalizes it, then emits SQL that creates
 * node/way/shape (NWS) rows plus changeset directives for each feature.
 *
 * @param {object} options - { geojson (string or object), period, type,
 *   source (defaults to 1), checkNode (defaults to 2) }
 * @param {object} changeset - an existing changeset (with id), or the fields
 *   used to create a new one (user_id/user, message)
 * @returns {object} a pg queue; print() yields the full SQL dump. On lint
 *   failure the queue contains only SQL comments describing the first error.
 */
exports.getImportSql = function(options, changeset) {
  var geojson = options.geojson,
      period = options.period,
      type = options.type,
      source = options.source || 1,
      // checkNode may legitimately be 0, so test for number-ness rather
      // than truthiness before falling back to 2.
      checkNode = (_.isNumber(options.checkNode)) ? options.checkNode : 2,
      Period = require('../models/Period');

  // Build a fresh changeset when none (or one without an id) was supplied.
  // NOTE(review): if `changeset` is null/undefined this dereferences
  // `changeset.user_id` and throws — presumably callers always pass an
  // object; verify against call sites.
  if (!changeset || !changeset.id) changeset = {
    user_id: changeset.user_id || changeset.user || 0,
    message: changeset.message || 'Import of GeoJSON'
  };
  changeset.directives = [];

  // (1) Parse string & lint GeoJSON
  if (typeof geojson === 'string') geojson = JSON.parse(geojson);
  var geojsonErrors = geojsonhint.hint(JSON.stringify(geojson));
  if (geojsonErrors.length) {
    // Lint failure: return a queue of SQL comments instead of real SQL.
    return pg.queue().add([
      '-- Error Importing GeoJSON:',
      '-- GeoJSON has ' + geojsonErrors.length + ' error(s)',
      '-- line ' + geojsonErrors[0].line + ': ' + geojsonErrors[0].message
    ].join('\n'));
  }

  // (2) Normalize GeoJSON properties
  geojson = exports.normalize(geojson);

  // (3a) Start SQL dump
  var sql = '';
  var startSql = pg.queue();
  // Temporary key/value table used to carry the new changeset id between
  // statements (dropped automatically at COMMIT).
  var tempData = _.uniqueId('data_');
  var Data = pg.model(tempData, { idAttribute: 'value' });
  startSql.add('CREATE TEMP TABLE ' + tempData + ' (' +
    'key varchar, ' +
    'value varchar' +
  ') ON COMMIT DROP');
  if (!changeset.id) {
    // Insert the changeset and stash LASTVAL() so later statements can
    // reference the generated id via a subselect.
    startSql.add(Changeset.insert(_.omit(changeset, 'directives')));
    startSql.add(Data.insert({ key: 'changeset', value: [['LASTVAL()']] }));
    changeset.id = [['('+Data.select('value::bigint').where({ key: 'changeset' })+')']];
  }
  sql += startSql.print();

  // (3b) Parse each GeoJSON feature
  var duplicateNodes = util.getDuplicateNodes(geojson.features);

  // Check each shape before generating SQL
  geojson.features.map(function(feature) {
    // Stamp collection-level metadata onto every feature.
    if (geojson.when) feature.when = geojson.when;
    feature.properties = _.extend(feature.properties, {
      periods: [period],
      type_id: type
    });
    return feature;
  }).forEach(function(feature) {
    sql += generateShapeSql(feature);
  });

  return pg.queue().add(sql);

  // Maps nested coords into array of Way objects
  function getPolyWays(geom) {
    var coords = geom.coordinates;
    // Wrap until coords has uniform depth: [poly][way][coord-pair].
    if (_.isNumber(coords[0][0])) coords = [coords];
    if (_.isNumber(coords[0][0][0])) coords = [coords];
    return _.flatten(coords.map(function(poly) {
      return poly.map(function(way, i) {
        // For polygons, ring 0 is the outer boundary; the rest are holes.
        var isInner = (i !== 0 && /Poly/.test(geom.type));
        var role = isInner ? 'inner' : 'outer';
        // Line geometries always use the 'line' role.
        role = (/Line/.test(geom.type)) ? 'line' : role;
        return { coords: way, role: role };
      });
    }));
  }

  // Add feature to NWS structure using SQL dump
  function generateShapeSql(feature) {
    var data = feature.properties,
        geom = feature.geometry,
        coords = geom.coordinates,
        esc = pg.engine.escape;
    var nodeData = { source_id: source };
    var queue = pg.queue();

    // Create temporary table to store shape relations
    var temp = _.uniqueId('rels_');
    queue.add('CREATE TEMP TABLE '+temp+' (' +
      'shape_id bigint, ' +
      'relation_type nws_enum, ' +
      'relation_id bigint, ' +
      'relation_role varchar, ' +
      'sequence_id int' +
    ') ON COMMIT DROP');
    var Temp = pg.model(temp, { idAttribute: 'sequence_id' });

    // Points
    if (/Point/.test(geom.type)) {
      // Single Point -> wrap so MultiPoint and Point share one code path.
      coords = _.isNumber(coords[0]) ? [coords] : coords;
      coords = _.compact(coords.map(function(node) {
        // Drop invalid coordinates; valid ones become create_node() calls.
        if (!util.verifyCoord(node)) return null;
        // NOTE(review): nodeData.tile is never assigned, so esc(nodeData.tile)
        // escapes undefined — confirm this is intentional.
        return 'create_node(' + [
          esc(node[0]),
          esc(node[1]),
          esc(nodeData.source_id),
          esc(nodeData.tile)
        ].join() + ')';
      })).map(function(cn, i) {
        return {
          relation_type: 'Node',
          relation_id: [[cn]],
          relation_role: 'point',
          sequence_id: i
        };
      });
      // Nothing valid to import for this feature: contribute no SQL.
      if (_.isEmpty(coords)) return '';
      queue.add(Temp.insert(coords));

    // Lines & Polygons
    } else {
      // Create ways
      var ways = getPolyWays(geom);
      queue.add(Temp.insert(ways.map(function(way, seq) {
        return {
          relation_type: 'Way',
          relation_id: [['cw()']],
          relation_role: way.role,
          sequence_id: seq
        };
      })));
      // Create nodes, wayNodes
      queue.add(ways.map(function(way, seq) {
        // Remove invalid coords + convert to string
        way.coords = _.compact(way.coords.map(function(coord) {
          return !util.verifyCoord(coord) ? null : coord;
        }));
        // 1-based indexes of coords that duplicate nodes elsewhere in the
        // import, so create_way_nodes can reuse them.
        var duplicateIdxs = _.compact(way.coords.map(function(coord, i) {
          if (_.contains(duplicateNodes, coord+'')) return i+1;
          return null;
        }));
        way = [
          checkNode,
          'ARRAY' + JSON.stringify(way.coords),
          duplicateIdxs.length ? 'ARRAY' + JSON.stringify(duplicateIdxs) : 'NULL',
          esc(data.source_id),
          '(SELECT relation_id FROM '+temp+' WHERE sequence_id = '+seq+')'
        ].join(',');
        return 'SELECT create_way_nodes(' + way + ') AS n';
      }));
    }

    // Get period start/end to record in shape
    var newData = _.clone(data);
    if (feature.when) {
      var start = dates.parse(feature.when.start || ''),
          end = dates.parse(feature.when.stop || '');
      newData.start_year = start.year;
      newData.start_month = start.month;
      newData.start_day = start.day;
      newData.end_year = end.year;
      newData.end_month = end.month;
      newData.end_day = end.day;
    }
    // Fall back to the period's own start/end dates via subselects.
    if (!newData.start_year) {
      newData.start_year = [['('+Period.find(period).select('start_year')+')']];
      newData.start_month = [['('+Period.find(period).select('start_month')+')']];
      newData.start_day = [['('+Period.find(period).select('start_day')+')']];
    }
    if (!newData.end_year) {
      newData.end_year = [['('+Period.find(period).select('end_year')+')']];
      newData.end_month = [['('+Period.find(period).select('end_month')+')']];
      newData.end_day = [['('+Period.find(period).select('end_day')+')']];
    }

    // Create shape & update with new shape_id
    queue.add(Shape.create(newData)).add(Temp.update({
      shape_id: [['LASTVAL()']]
    }));
    // Finish shape relations
    queue.add(Shape.Relation.insert(Temp.all()).returning('shape_id'));
    var directive = Changeset.Directive._parseDirectives(changeset.id, {
      action: 'add',
      object: 'shape',
      object_id: Temp.select('shape_id').limit(1),
      data: JSON.stringify(data),
      geometry: { type: geom.type }
    });
    queue.add(Changeset.Directive.insert(directive).returning('changeset_id'));
    return '\n\n-- ' + data.name + '\n' + queue.print();
  }
};
t.test('decoded feature is valid GeoJSON', function (t2) {
  t2.plan(1);
  // Serialize before linting; an empty message list means valid GeoJSON.
  var hintResult = geojsonhint.hint(JSON.stringify(decoded));
  t2.deepEqual(hintResult, []);
});
t.test('polygon is valid GeoJSON', function (t2) {
  t2.plan(1);
  // Serialize before linting; an empty message list means valid GeoJSON.
  var hintResult = geojsonhint.hint(JSON.stringify(polygon));
  t2.deepEqual(hintResult, []);
});
.then(function(results) { t.deepEqual(geojsonhint.hint(results), [], 'results are valid'); t.equal(geojsonhint.hint(results.features[0]).length, 0, 'at least one valid result'); t.end(); });
client.geocodeForward('100 6th St\nSan Francisco', function(err, geocoded) {
  t.ifError(err);
  // The geocoder's response must be valid GeoJSON.
  t.deepEqual(geojsonhint.hint(geocoded), [], 'results are valid');
  t.end();
});
client.geocodeForward('Chester, New Jersey', function(err, geocoded) {
  t.ifError(err);
  // The geocoder's response must be valid GeoJSON.
  t.deepEqual(geojsonhint.hint(geocoded), [], 'results are valid');
  t.end();
});
client.geocodeReverse({ latitude: 33.6875431, longitude: -95.4431142 }, function(err, geocoded) {
  t.ifError(err);
  // The reverse-geocoder's response must be valid GeoJSON.
  t.deepEqual(geojsonhint.hint(geocoded), [], 'results are valid');
  t.end();
});
results.forEach(function (entry, index) {
  // Every individual result must lint cleanly.
  var message = 'result ' + index + ' is valid';
  t.deepEqual(geojsonhint.hint(entry), [], message);
});