Skip to content

Instantly share code, notes, and snippets.

@dskvr
Last active January 22, 2024 22:00
Show Gist options
  • Star 1 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save dskvr/8351d9de9cf1b8184ed4387f88f2c8f1 to your computer and use it in GitHub Desktop.
Save dskvr/8351d9de9cf1b8184ed4387f88f2c8f1 to your computer and use it in GitHub Desktop.
nostr gossip/outbox protocol event generator [proof of concept] — earlier versions of this utilized NIP-66, but for the proof of concept I simplified it to account ONLY for online relays, as opposed to the full NIP-66 dataset. Replace the API getter with a NIP-66 getter and map relay characteristics so you can utilize them when applying a bias.
{
"name": "gossip.events",
"type": "module",
"version": "1.0.0",
"main": "index.js",
"license": "MIT",
"dependencies": {
"fast-safe-stringify": "2.1.1",
"node-cache": "5.1.2",
"nostr": "0.2.8"
}
}
import { RelayPool } from 'nostr'
import Cache from 'node-cache'
import safeStringify from 'fast-safe-stringify'
import fs from 'fs'
// nostr event kinds consumed by this script
const KIND_RELAY_LIST = 10002  // relay list metadata events
const KIND_FOLLOW_LIST = 3     // follow (contact) list events

// Starting timestamps (seconds) for the backfill windows.
// (An earlier revision started at 1680378275 / 1672602275.)
const SINCE_THE_BEGINNING = {
  relayLists: 1688240675,
  followLists: 1688240675,
}

// Current unix timestamp in whole seconds.
const now = () => Math.round(Date.now() / 1000)
// In-memory stores (no persistent db for the proof of concept; see
// readCache/writeCache for the ./cache.json snapshotting).
const $meta = new Cache()                // bookkeeping, e.g. the 'since' cursor
const relayLists = new Cache()           // pubkey -> [relay url, ...]
const followLists = new Cache()          // pubkey -> [followed hex, ...]
const followerLists = new Cache()        // pubkey -> [follower hex, ...]
const gossipMap = new Cache()            // pubkey -> [[relay, ...hexes], ...]
const processedRelayList = new Cache()   // pubkey -> processed flag
const processedFollowList = new Cache()  // pubkey -> processed flag

// Mutable module state: sync cursors and the online-relay snapshot,
// both reassigned inside run().
let since = SINCE_THE_BEGINNING
let onlineRelays = []
/**
 * Fetch relay-list events (kind 10002) for a 6-hour window starting at
 * since.relayLists, cache each user's write-relay URLs, and advance the
 * cursor on EOSE.
 * @returns {Promise<number>} count of relay lists found in this window
 */
const populateRelayLists = async function(){
  return new Promise((resolve) => {
    // Caught up: resolve 0 (not undefined) so run()'s `=== 0`
    // completion check behaves consistently.
    if( since.relayLists > now() )
      return resolve(0)
    const $pool = new RelayPool(['wss://relay.damus.io'])
    // random subscription id for this round
    const subid = Math.random().toString(36).substring(2, 10) + Math.random().toString(36).substring(2, 10);
    let found = 0
    const until = since.relayLists + 60*60*6  // 6-hour paging window
    $pool
      .on('open', ($relay) => {
        $relay.subscribe(subid, {kinds: [KIND_RELAY_LIST], since: since.relayLists, until: until})
      })
      .on('event', ($relay, _subid, ev) => {
        const relayList = ev.tags
          // we only need r tags for poc, and if tag[2] is set they don't write to it so omit
          .filter( tag => tag[0] === 'r' && !tag?.[2])
          // only really need the URL string
          .map( tag => tag[1] )
        relayLists.set(ev.pubkey, relayList)
        processedRelayList.set(ev.pubkey, false)
        found++
      })
      .on('eose', ($relay) => {
        since.relayLists = until   // advance the cursor past this window
        $meta.set('since', since)
        console.log('relayLists:', found, 'since:', since.relayLists)
        $pool.unsubscribe(subid)
        $pool.close()
        resolve(found)
      })
  })
}
/**
 * Fetch the list of currently-online relays from the nostr.watch API.
 * Aborts the request after 10 seconds. Never rejects: any failure
 * resolves to an empty array (best effort for the POC).
 * @returns {Promise<string[]>} relay URLs, normalized via `new URL()`
 */
const getOnlineRelays = async function(){
  console.log('calling api')
  const controller = new AbortController();
  const timeout = setTimeout( () => controller.abort(), 10000)
  try {
    const response = await fetch(`https://api.nostr.watch/v1/online`, {signal: controller.signal })
    if (!response.ok) {
      console.log('api error 1')
      return []
    }
    try {
      const json = await response.json()
      console.log('api results retrieved.')
      // Normalize each URL (adds trailing slash etc.) so later exact
      // string comparisons against cached relay URLs can match.
      return json.map( relay => new URL(relay).toString() )
    }
    catch (e) {
      // body parse / URL normalization failure
      console.log('api error 2', e)
      return []
    }
  }
  catch (e) {
    // network failure or abort
    console.log('api error 3')
    return []
  }
  finally {
    clearTimeout(timeout)
  }
}
/**
 * Fetch follow-list events (kind 3) for a 6-hour window starting at
 * since.followLists, cache each user's followed pubkeys, and advance the
 * cursor on EOSE.
 * (A previous inline follower-accumulation loop here was dead code — its
 * result was discarded; follower lists are built in generateFollowerList().)
 * @returns {Promise<number>} count of follow lists found in this window
 */
const populateFollowLists = async function(){
  return new Promise((resolve) => {
    // Caught up: resolve 0 (not undefined) so run()'s `=== 0`
    // completion check behaves consistently.
    if( since.followLists > now() )
      return resolve(0)
    const $pool = new RelayPool(['wss://relay.damus.io'])
    // random subscription id for this round
    const subid = Math.random().toString(36).substring(2, 10) + Math.random().toString(36).substring(2, 10);
    let found = 0
    const until = since.followLists + 60*60*6  // 6-hour paging window
    $pool
      .on('open', ($relay) => {
        $relay.subscribe(subid, {kinds:[KIND_FOLLOW_LIST], since: since.followLists, until: until})
      })
      .on('event', ($relay, _subid, ev) => {
        const followList = ev.tags
          // 'p' tags carry the followed pubkeys
          .filter( tag => tag[0] === 'p')
          // only really need the hex string
          .map( tag => tag[1] )
        if(!followList.length)
          return
        followLists.set(ev.pubkey, followList)
        processedFollowList.set(ev.pubkey, false)
        found++
      })
      .on('eose', ($relay) => {
        console.log('followLists:', found, 'since:', since.followLists)
        since.followLists = until   // advance the cursor past this window
        $meta.set('since', since)
        $pool.unsubscribe(subid)
        $pool.close()
        resolve(found)
      })
  })
}
// Stub: intended to pull relay status/characteristics (e.g. NIP-66 data,
// per the gist description) into the pipeline; unimplemented in this POC.
const populateRelayStatuses = async function(){
}
/**
 * Invert the follow graph: for each user, add that user as a follower of
 * every pubkey on their follow list. Results accumulate in
 * `followerLists` (pubkey -> array of follower hexes, deduplicated).
 */
const generateFollowerList = function(){
  // NOTE(review): iterates relayLists.keys(), so users without a cached
  // relay list never contribute followers — confirm this is intended.
  for (const userHex of relayLists.keys()) {
    const follows = followLists.get(userHex)
    if (!follows)
      continue
    for (const followHex of follows) {
      const followerSet = new Set(followerLists.get(followHex) || [])
      followerSet.add(userHex)
      followerLists.set(followHex, Array.from(followerSet))
    }
  }
}
/**
 * For every user with a follow list, build groups shaped
 * [relayUrl, ...followedHexes] describing on which online relays that
 * user's follows publish, consolidate them, and cache the result per
 * user in `gossipMap`.
 *
 * Fixes vs. previous version: `map` is now reset per user (it was
 * declared outside the loop, so each user's gossip map accumulated all
 * previous users' data); online-relay membership uses a Set instead of
 * Array.includes inside the nested loops; the console.dir debug dump of
 * the whole onlineRelays array is removed.
 */
const generateGossipMap = function(){
  // O(1) membership tests for the nested loops below
  const online = new Set(onlineRelays)
  followLists.keys().forEach( userHex => {
    const followList = followLists.get(userHex)
    if(!followList)
      return
    const map = {}   // relay url -> Set of followed hexes, per user
    followList.forEach( followHex => {
      const followRelayList = relayLists.get(followHex)
      if(!followRelayList)
        return
      followRelayList.forEach( relay => {
        // Normalize so it matches the normalized onlineRelays entries;
        // unparseable URLs are left as-is (best effort).
        try {
          relay = new URL(relay).toString()
        }
        catch(e){}
        if(!online.has(relay))
          return
        if(!map[relay])
          map[relay] = new Set()
        //filter the relay here
        map[relay].add(followHex)
      })
    })
    let arr = []
    Object.keys(map).forEach( relay => {
      arr.push([relay, ...Array.from(map[relay])])
    })
    arr = consolidateUrls(arr)
    gossipMap.set(userHex, arr)
  })
}
/**
 * Deduplicate hex pubkeys across relay groups: each hex is kept only in
 * the first group where it appears (and only once within a group);
 * groups left with no hexes are dropped.
 *
 * Fix: the previous implementation also merged later groups' hexes INTO
 * earlier groups (hexSet1.add), so the first relay accumulated every hex
 * and hexes ended up under several relays — the opposite of
 * consolidation.
 *
 * @param {Array<Array<string>>} urlArrays - groups shaped [url, ...hexes]
 * @returns {Array<Array<string>>} consolidated groups, same shape
 */
const consolidateUrls = (urlArrays) => {
  const seen = new Set()
  const consolidated = []
  for (const [url, ...hexes] of urlArrays) {
    // de-dupe within the group, then drop hexes claimed by earlier groups
    const kept = [...new Set(hexes)].filter( hex => !seen.has(hex) )
    kept.forEach( hex => seen.add(hex) )
    if (kept.length)
      consolidated.push([url, ...kept])
  }
  return consolidated
};
/**
 * Write one gossip event file per user that has an entry in gossipMap.
 * (Filtering by the processed* caches was disabled in the original POC.)
 */
const generateEvents = function(){
  for (const userHex of relayLists.keys()) {
    const map = gossipMap.get(userHex)
    if (!map)
      continue
    writeEvent(userHex, generateEvent(userHex, map))
  }
}
/**
 * Build an unsigned event skeleton for a user's gossip map: a 'd' tag
 * holding the user's hex pubkey plus one 'r' tag per [relay, ...hexes]
 * group. (No `kind`/`pubkey`/signature here; see the signEvent stub.)
 *
 * Fix: no longer mutates the caller's arrays — the previous version
 * unshift()ed 'r' into each group in `relayHexGroups`.
 *
 * @param {string} user - hex pubkey, used as the 'd' tag value
 * @param {Array<Array<string>>} relayHexGroups - groups shaped [url, ...hexes]
 * @returns {{created_at:number, content:string, tags:Array<Array<string>>}}
 */
const generateEvent = function(user, relayHexGroups){
  const event = {}
  event.created_at = now()
  event.content = ''
  event.tags = []
  event.tags.push(['d', user])
  relayHexGroups.forEach( group => {
    event.tags.push(['r', ...group])   // copy — don't mutate the input
  })
  return event
}
// Stub: signing is intentionally left out of the proof of concept.
// When implemented it should sign `event` and return it with the
// signature attached (see the commented sketch below).
const signEvent = async function(event){
// const signature = await sign(event)
// event.signature = signature
// return event
}
// no db for poc
/**
 * Restore the in-memory caches from ./cache.json (written by writeCache).
 * A missing or unreadable file is fine — the first run starts empty.
 *
 * Fix: the previous version resolved cache names with eval(cacheKey),
 * which would execute arbitrary keys read from the cache file; replaced
 * with an explicit name -> cache lookup (unknown keys are ignored).
 */
const readCache = function(){
  let data = null
  try {
    data = JSON.parse(fs.readFileSync('./cache.json', 'utf8'))
  } catch (err) {
    console.warn('first run, no cache, endpoints will be available in a bit.')
  }
  if(!data)
    return
  const caches = {
    relayLists,
    followLists,
    followerLists,
    gossipMap,
    processedRelayList,
    processedFollowList,
    $meta,
  }
  Object.keys(data).forEach( cacheKey => {
    const $theCache = caches[cacheKey]
    if(!$theCache || !data[cacheKey].length)
      return
    console.log(cacheKey)
    // entries are [key, value] pairs, the shape writeCache produces
    data[cacheKey].forEach( entry => {
      if(!entry)
        return
      $theCache.set(entry[0], entry[1])
    })
  })
}
/**
 * Persist every in-memory cache to ./cache.json as arrays of
 * [key, value] pairs keyed by cache name — the shape readCache expects.
 */
const writeCache = function(){
  // snapshot one cache as [key, value] pairs
  const dump = ($cache) => $cache.keys().map( key => [key, $cache.get(key)] )
  const data = {
    'relayLists': dump(relayLists),
    'followLists': dump(followLists),
    'followerLists': dump(followerLists),
    'gossipMap': dump(gossipMap),
    'processedRelayList': dump(processedRelayList),
    'processedFollowList': dump(processedFollowList),
    '$meta': dump($meta),
  }
  fs.writeFileSync('./cache.json', safeStringify(data));
}
/**
 * Write a user's generated event to ./events/<user>.json.
 * Fix: creates the events directory if missing — previously this threw
 * ENOENT on a fresh checkout with no ./events directory.
 * @param {string} user - hex pubkey, used as the filename
 * @param {object} event - event object to serialize
 */
const writeEvent = function(user, event){
  console.log(event)
  fs.mkdirSync('./events', { recursive: true })
  fs.writeFileSync(`./events/${user}.json`, JSON.stringify(event));
}
// Warm the in-memory caches from disk before the pipeline starts.
readCache()
console.log('cache:',$meta.keys())
/**
 * Main pipeline: (1) page through relay-list and follow-list events
 * until caught up, persisting after each round, (2) fetch the online
 * relays, (3) derive follower lists and the per-user gossip map,
 * (4) write one event file per user.
 */
const run = async function(){
console.dir(since)
// resume from the persisted cursor when one exists
since = $meta.get('since') || since
console.dir(since)
// NOTE(review): `end` starts true, so the sync loop below never runs and
// only previously-cached data is processed — looks like a debugging
// leftover; confirm whether this should start as `false`.
let end = true
while(!end){
const foundRelayLists = await populateRelayLists()
const foundFollowLists = await populateFollowLists()
writeCache()
// stop once both queries return nothing and both cursors are within
// ten minutes of now
if(foundRelayLists === 0 && foundFollowLists === 0 && since.relayLists > now() - 60*10 && since.followLists > now() - 60*10)
end = true
console.log('next round...')
// await new Promise( resolve => setTimeout( resolve, 5000 ) )
}
onlineRelays = await getOnlineRelays()
generateFollowerList()
generateGossipMap()
generateEvents()
// gossipMap.keys().forEach( userHex => {
// console.log('----------------------------------')
// console.log(userHex, gossipMap.get(userHex))
// })
// run()
}
// Entry point. NOTE(review): no .catch here — a rejection inside run()
// becomes an unhandled promise rejection.
run()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment