Skip to content

Instantly share code, notes, and snippets.

@mig5
Created March 4, 2024 21:56
Show Gist options
  • Save mig5/4d4683a66ff280ba35184f6baad1b964 to your computer and use it in GitHub Desktop.
ipfs-publish
#!/usr/bin/env python3
import argparse
import json
import logging
import requests
import time
from csv import writer
from logging.handlers import SysLogHandler
from pathlib import Path
def parse_args():
    """
    Handle CLI args.

    Returns:
        argparse.Namespace with string attributes ``file``, ``url``,
        ``logfile`` and ``readme`` — all four are required.
    """
    parser = argparse.ArgumentParser()
    # NOTE: the original passed default=False and action="store" on every
    # option.  argparse never consults the default when required=True, and
    # "store" is already the default action, so both are dropped here.
    parser.add_argument(
        "--file",
        required=True,
        help="File to publish",
    )
    parser.add_argument(
        "--url",
        required=True,
        help="URL to retrieve original file from.",
    )
    parser.add_argument(
        "--logfile",
        required=True,
        help="Log file to write metadata to.",
    )
    parser.add_argument(
        "--readme",
        required=True,
        help="README file to provide context about the other files.",
    )
    return parser.parse_args()
class IpfsPublish:
    """
    Fetch a file from a remote URL, add it (together with a changelog and
    a README) to a local IPFS node as a "pft-incidents" directory, and
    re-point a fixed IPNS name at the new directory CID.
    """

    def __init__(self, url, file, logfile, readme):
        """
        Set up our logger and set variables.

        :param url: URL to retrieve the original file from.
        :param file: Local path the fetched file is written to / published.
        :param logfile: Path of the changelog file published alongside it.
        :param readme: Path of the README file published alongside it.
        """
        self.logger = logging.getLogger(__name__)
        self.logger.setLevel(logging.DEBUG)
        # Log to the local syslog daemon; /dev/log is the standard Unix
        # datagram socket for syslog.
        handler = SysLogHandler(
            facility=SysLogHandler.LOG_DAEMON,
            address="/dev/log"
        )
        handler.setFormatter(logging.Formatter('ipfs-publish: %(message)s'))
        self.logger.addHandler(handler)
        self.url = url
        self.file = file
        self.logfile = logfile
        self.readme = readme

    def fetch(self):
        """
        Fetch the remote file before adding it to IPFS.

        Raises SystemExit on any HTTP/network failure so the script
        aborts instead of publishing stale data.
        """
        try:
            # Timeout added so a stalled remote server cannot hang the
            # job forever (requests has no default timeout).
            r = requests.get(self.url, timeout=60)
            r.raise_for_status()
            with open(self.file, "wb") as f:
                f.write(r.content)
        except requests.exceptions.RequestException as e:
            self.logger.debug(f"Error fetching data from source: {e}")
            raise SystemExit(e)

    def publish(self):
        """
        Add and publish the file to IPFS.

        Talks to the IPFS HTTP API on localhost: adds the fetched file,
        resolves the current IPNS pointer, wraps file + changelog +
        README into a "pft-incidents" directory, and republishes the
        IPNS name to point at the new directory CID.

        Raises SystemExit on any API/network failure.
        """
        # IPFS API endpoint (local daemon only).
        url = "http://127.0.0.1:5001/api/v0"
        ipns_directory_hash = "k51qzi5uqu5dlnwjrnyyd6sl2i729d8qjv1bchfqpmgfeu8jn1w1p4q9x9uqit"
        # Add the file
        print("Getting the file name")
        p = Path(self.file)
        with p.open("rb") as fp:
            fb = fp.read()
        file_name = p.name
        try:
            # Add our current file to IPFS and get the CID hash
            print("Adding the file to IPFS and get the CID hash")
            response = requests.post(f"{url}/add", files={file_name: fb})
            print(response.json())
            response.raise_for_status()
            ipfs_new_hash = response.json()["Hash"]
            # Resolve the current CID of the IPNS pointer
            print("Resolving the current CID of the IPNS pointer")
            response = requests.post(f"{url}/name/resolve?arg={ipns_directory_hash}")
            print(response.json())
            response.raise_for_status()
            cid_of_ipns_pointer = response.json()["Path"].split("/ipfs/")[1]
            # Now get the CID of the file we care about within there
            print("Getting the CID of the file we care about within IPNS CID")
            response = requests.post(f"{url}/dag/get?arg={cid_of_ipns_pointer}")
            print(response.json())
            response.raise_for_status()
            # BUGFIX: initialise before the search so the debug log below
            # cannot raise NameError the first time this file is ever
            # published (no matching link yet).  BUGFIX: default "Links"
            # to an empty list, not an empty string.
            ipfs_previous_hash = None
            links = response.json().get("Links", [])
            for link in links:
                if link.get("Name", "") == file_name:
                    ipfs_previous_hash = link["Hash"]["/"]
                    break
            # If the new hash is different, we will re-add the directory of files
            # and re-publish the static IPNS pin to point to that new directory CID
            self.logger.debug(f"Previous hash: {ipfs_previous_hash}, new hash: {ipfs_new_hash}")
            # Add attributes and contents of changelog and README for publishing
            print("Adding attributes and contents of changelog and README for publishing")
            log_path = Path(self.logfile)
            with log_path.open("rb") as fp:
                lb = fp.read()
            log_name = log_path.name
            readme_path = Path(self.readme)
            with readme_path.open("rb") as fp:
                rm = fp.read()
            readme_name = readme_path.name
            # Publish a folder containing the files for more human-friendly viewing.
            # %2F is a URL-encoded "/", which makes the IPFS API wrap the
            # files in a directory named "pft-incidents".
            folder_and_file_name = f"pft-incidents%2F{file_name}"
            folder_and_log_name = f"pft-incidents%2F{log_name}"
            folder_and_readme_name = f"pft-incidents%2F{readme_name}"
            folder_and_files = {
                readme_name: (
                    folder_and_readme_name,
                    rm,
                    "application/octet-stream",
                    {"filename": folder_and_readme_name},
                ),
                file_name: (
                    folder_and_file_name,
                    fb,
                    "application/octet-stream",
                    {"filename": folder_and_file_name},
                ),
                log_name: (
                    folder_and_log_name,
                    lb,
                    "application/octet-stream",
                    {"filename": folder_and_log_name},
                ),
            }
            print("Publishing folder containing the files for human-friendly viewing. Still just as IPFS, not IPNS")
            response = requests.post(f"{url}/add", files=folder_and_files)
            print(response.text)
            response.raise_for_status()
            # Multiple hashes don't seem to be returned as proper json, and
            # we only care about the directory's hash, so split it up and
            # obtain it.
            ipfs_response = response.text.split("\n")
            for item in ipfs_response:
                # BUGFIX: the body ends with a newline, so split() yields a
                # blank trailing element; skip it rather than letting
                # json.loads("") raise JSONDecodeError.
                if not item.strip():
                    continue
                j = json.loads(item)
                if j["Name"] == "pft-incidents":
                    # Point our IPNS at the new directory CID hash
                    print("Pointing our IPNS at the new directory CID hash")
                    ipfs_hash = j["Hash"]
                    response = requests.post(
                        f"{url}/name/publish?arg={ipfs_hash}&key={ipns_directory_hash}"
                    )
                    print(response.json())
                    response.raise_for_status()
                    break
        except requests.exceptions.RequestException as e:
            self.logger.debug(f"Error posting to IPFS: {e}")
            raise SystemExit(e)
if __name__ == "__main__":
    # Entry point: read CLI options, then fetch the source file and
    # publish everything to IPFS/IPNS.
    cli = parse_args()
    publisher = IpfsPublish(cli.url, cli.file, cli.logfile, cli.readme)
    publisher.fetch()
    publisher.publish()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment