// Saved from a GitHub gist page ("Instantly share code, notes, and snippets.")
// NOTE(review): this file is a copy-paste of several distinct gist files run
// together; page chrome has been commented out and fragment boundaries remain.
var http = require('http')
class HttpStore {
constructor(storeUrl) {
// begin downloading the url we requested, stream it to our handleData function
http.get(storeUrl, (res) => {
res.on('data', this.handleData.bind(this))
res.on('end', this.handleEnd.bind(this))
// Stream one gzipped CSV file from disk, decompress it, parse it into rows,
// and hand each parsed row to processRow.
//
// @param {string} file - path to a .csv.gz file on disk
function processFile(file) {
fs.createReadStream(file) // open the gzipped csv file and start sending chunks of it downstream
.pipe(zlib.createGunzip()) // once you get a chunk, decompress it and pass it downstream
.pipe(csv()) // convert the decompressed chunk into individual rows and values
// BUG FIX: the original wrote .on('data', processRow(row)) — that calls
// processRow immediately (with an undefined `row`) and registers its return
// value as the listener. Pass the function itself so it runs once per row.
.on('data', processRow)
// surface stream errors instead of letting them become unhandled events
.on('error', (err) => { console.error('processFile failed:', err); })
}
// Record a newly-received geometry packet and advance the chunk cursor.
// NOTE(review): this paste is truncated — the arrow function opened at the
// bottom never closes here, so the method body is incomplete from this view.
//
// @param {Buffer} buf    raw packet payload (the /4 scaling below suggests
//                        4-byte elements, presumably Float32 — confirm)
// @param {number} offset byte offset of this packet within the full buffer
// @param {number} count  expected byte length of the packet
updateGeometry(buf, offset, count) {
// skip packets whose offset we've already handled (simple dedupe)
if(this.offsetsProcessed.indexOf(offset) === -1) {
if(buf.length !== count) {
// NOTE(review): logs a rejection but does NOT return — the mismatched
// packet is still processed below; confirm whether an early return was intended
console.log("Invalid packet, rejecting");
}
this.offsetsProcessed.push(offset);
var _buf = buf;
// convert byte offset/count to element offset/count (4 bytes per element)
var _offset = offset / 4;
var _count = count / 4;
// wrap the chunk cursor and clear the loading flag once past the last piece
if(this.chunkCounter > this.torrent.pieces.length) {
this.chunkCounter = 0;
this.loading = false;
}
// only act on chunks the store holds that are not yet recorded as pieces
if(this.torrent.store.store.chunks[this.chunkCounter] !== undefined && this.torrent.pieces[this.chunkCounter] === null) {
let a = this.torrent.store.store.chunks[this.chunkCounter];
// fire off an anon function that contains a reference to the chunk id we're sending
// probably should be a promise someday
(() => {
var cc = Math.floor(this.chunkCounter);
// --- fragment: gaia CSV -> sqlite importer setup (separate script in this paste) ---
'use strict';
// core deps: filesystem, streaming CSV parser, and a local sqlite database handle
var fs = require('fs')
, csv = require('fast-csv')
, sqlite3 = require('sqlite3').verbose()
, db = new sqlite3.Database('./data/gaia.sqlite');
// the subset of Gaia catalogue columns this importer actually keeps
var fields = ['ra', 'dec', 'parallax', 'phot_g_mean_mag'];
var headers = ['solution_id','source_id','random_index','ref_epoch','ra','ra_error','dec','dec_error','parallax','parallax_error','pmra','pmra_error','pmdec','pmdec_error','ra_dec_corr','ra_parallax_corr','ra_pmra_corr','ra_pmdec_corr','dec_parallax_corr','dec_pmra_corr','dec_pmdec_corr','parallax_pmra_corr','parallax_pmdec_corr','pmra_pmdec_corr','astrometric_n_obs_al','astrometric_n_obs_ac','astrometric_n_good_obs_al','astrometric_n_good_obs_ac','astrometric_n_bad_obs_al','astrometric_n_bad_obs_ac','astrometric_delta_q','astrometric_excess_noise','astrometric_excess_noise_sig','astrometric_primary_flag','astrometric_relegation_factor','astrometric_weight_al','astrometric_weight_ac','astrometric_priors_used','matched_observations','duplicated_source','scan_direction_strength_k1','scan_dire
// get the given filename from the path given
// Download one Gaia source CSV from the ESA CDN and stream the response into
// `upload` (an S3 upload stream defined elsewhere in the original script).
http.get({
hostname: 'cdn.gea.esac.esa.int',
port: 80,
// NOTE(review): `$(unknown)` is not template-literal syntax (that would be
// `${...}`) — a filename interpolation appears to have been mangled when this
// gist was pasted; as written, the literal path ".../$(unknown)" is requested.
path: `/Gaia/gaia_source/csv/$(unknown)`,
agent: false // create a new agent just for this one request
}, (res) => {
// stream-upload the file to Amazon S3 as it comes in
res.pipe(upload);
});
// NOTE(review): no 'error' handler on the request or the response stream —
// a network failure here would surface as an unhandled 'error' event.
// --- fragment: AWS S3/SQS uploader setup (separate script in this paste) ---
// NOTE(review): this comma-chained var mixes requires with plain config values,
// and re-declares fs/http already required earlier in this pasted file.
var AWS = require('aws-sdk'),
fs = require('fs'),
awsCredentialsPath = './aws.credentials.json',
sqsQueueUrl = 'SQS_QUEUE_URL', // placeholder — replace with the real queue URL
http = require('http'),
sqs; // declared here; presumably initialized later in the original script
// Load credentials from local json file
AWS.config.loadFromPath(awsCredentialsPath);
// Pipeline: pull a gzipped Gaia CSV object from S3, decompress it, parse rows,
// filter/validate stars, re-serialize to CSV, and stream the result back to S3.
// `s3`, `params`, `starValidator`, `toCsv` and `upload` are defined elsewhere.
s3.getObject(params).createReadStream()
.pipe(zlib.createGunzip()) // gunzip the object as it streams in
.pipe(csv()) // parse decompressed bytes into CSV rows
.pipe(starValidator) // keep only rows that pass the star filter
.pipe(toCsv) // serialize surviving rows back to CSV text
.pipe(upload) // stream the result to its S3 destination
// NOTE(review): .pipe() does not forward 'error' events — each stage needs its
// own handler (or stream.pipeline) to avoid unhandled stream errors.
// degrees -> radians conversion factor used by the star-position math
const DEG2RAD = Math.PI / 180;
// Stream one gzipped CSV file, capturing the first row as the header list so
// later rows can be looked up by column name.
// NOTE(review): truncated in this paste — the else-branch of the 'data'
// handler (and the rest of the chain) is cut off below.
fs.createReadStream(file)
.pipe(zlib.createGunzip())
.pipe(csv())
.on('data', d => {
// assume the first row is a list of headers, we can use this to do easy lookups of rows later
if(!headers) {
headers = d;
} else {
// only process rows that have a measurement for parallax
'use strict'
// by Charlie Hoey <me@charliehoey.com>
//
// script for processing a folder full of gzipped CSV files into a single filtered CSV file.
// specifically this calculates x/y/z positions and distances for stars that contain a parallax measurement
// ......
var csv = require('fast-csv')
var fs = require('fs')
const zlib = require('zlib');
var glob = require('glob');