Skip to content

Instantly share code, notes, and snippets.

@awkay
Created June 27, 2023 21:34
Show Gist options
  • Save awkay/3cf6d550986d4e25feefdebb8ff00671 to your computer and use it in GitHub Desktop.
(ns file-drops
(:require
[clojure.core.async :as async :refer [go]]
[com.fulcrologic.fulcro.components :as comp :refer [defsc]]
[com.fulcrologic.fulcro.data-fetch :as df]
[com.fulcrologic.fulcro.dom.events :as evt]
[com.fulcrologic.rad.blob :as blob]
[taoensso.timbre :as log]))
(defn- transfer-entries
  "Return a seq of FileSystemEntry objects for the items on `event`'s
   DataTransfer. Items whose `webkitGetAsEntry` call throws are logged
   and skipped."
  [event]
  (let [items (.. ^js event -dataTransfer -items)]
    (keep
      (fn [^js it]
        (try
          (.webkitGetAsEntry it)
          (catch :default err
            (log/error err))))
      items)))
(defn directory?
  "True (truthy) when the FileSystemEntry `entry` represents a directory."
  [^js entry]
  (-> entry .-isDirectory))
(defn entry->js-file
  "Ask the FileSystemFileEntry `entry` for its File object. Returns a channel
   onto which the js File will be placed once the browser delivers it."
  [entry]
  (let [out (async/chan)]
    (.file entry
      (fn [js-file]
        (async/put! out js-file)))
    out))
(defn scan
  "Recursively scan a FileEntry (which might be a directory). Returns an async channel containing a list of
   description maps of the files in `entry` (a file or directory).

   WARNING: This is not supported on every browser, and the standard may evolve/change.

   The maps include:

   :file - The js file
   :sha - The sha-256 of the file's content
   :name - file name
   :path - The path to the file relative from the drop root"
  [^js entry]
  (let [result (async/chan)]
    (if (directory? entry)
      ;; Directory case: drain the DirectoryReader. readEntries delivers results in
      ;; batches, so we keep calling it until it reports an empty batch, accumulating
      ;; each batch into `all-entries`.
      (let [dir-reader (.createReader entry)
            all-entries (volatile! [])
            get-entries (fn get-entries* []
                          (.readEntries dir-reader
                            (fn [^js results]
                              (let [nitems (.-length results)]
                                (if (and (number? nitems) (pos? nitems))
                                  (do
                                    (vswap! all-entries into results)
                                    (get-entries*))
                                  ;; Empty batch => we've seen every child. Recursively
                                  ;; `scan` each one sequentially and concatenate the
                                  ;; child file maps onto the output channel.
                                  (let [entries @all-entries]
                                    (async/go-loop [subentry (first entries)
                                                    others (rest entries)
                                                    results []]
                                      (if subentry
                                        (let [children (async/<! (scan subentry))
                                              new-results (into results children)]
                                          (recur (first others) (rest others) new-results))
                                        (async/>! result results)))))))
                            ;; Error callback for readEntries: log and leave the channel
                            ;; unfulfilled (callers will simply never receive a value).
                            (fn [err] (log/error err))))]
        (get-entries))
      ;; Plain-file case: resolve the File object, hash its content, and emit a
      ;; single-element vector with the description map.
      (go
        (try
          (let [file (async/<! (entry->js-file entry))
                sha (async/<! (blob/file-sha256 file))]
            (async/>! result [{:file file
                               :name (.-name entry)
                               :path (.-fullPath entry)
                               :sha sha}]))
          (catch :default e
            (log/error e)
            ;; On failure still emit something so downstream loops don't hang;
            ;; the map contains only the raw entry as a diagnostic.
            (async/>! result [{:entry entry}])))))
    result))
(defn drop-listener-props
  "Returns drag-n-drop event listeners that are necessary for handling the drop area events. Use this in place of the
   map of props in your DOM element. It accepts k/v pairs to add to the props, and requires at least one: `filesDropped`, which
   will be passed a vector of maps that describe the items that were dropped.

   ```
   (dom/div (drop-listener-props
              :filesDropped (fn [items] ...)
              :style {:backgroundColor :red})
     ...)
   ```

   The maps in `items` will include:

   :file - the-js-file
   :name - the-short-filename
   :path - the-path-from-drop-source-root
   :sha - The sha-256 of the file's content

   This will also set a handler for input events, so it can be used with file inputs
   (including inputs with the `multiple` attribute — every selected file is reported)."
  [& {:keys [filesDropped] :as props}]
  (merge
    (dissoc props :filesDropped)
    ;; Without preventing the default dragover behavior the browser will not
    ;; deliver the drop event to this element.
    {:onDragOver (fn [evt] (evt/prevent-default! evt))
     ;; <input type=file> support. FileList is an array-like (not a js Array),
     ;; so use array-seq to walk it. Process every selected file, not just the
     ;; first, so multi-select inputs work. For inputs there is no directory
     ;; structure, so :path is just the file name.
     :onChange   (fn [evt]
                   (when filesDropped
                     (let [files (vec (array-seq (.. evt -target -files)))]
                       (async/go-loop [file    (first files)
                                       others  (rest files)
                                       results []]
                         (if file
                           (let [sha (async/<! (blob/file-sha256 file))]
                             (recur (first others) (rest others)
                               (conj results {:file file
                                              :name (.-name file)
                                              :path (.-name file)
                                              :sha  sha})))
                           (filesDropped results))))))
     ;; Drag-and-drop support: scan each dropped entry (recursively, for
     ;; directories) and hand the flattened list to `filesDropped`.
     :onDrop     (fn [evt]
                   (evt/prevent-default! evt)
                   (when filesDropped
                     (let [entries (vec (transfer-entries evt))]
                       (async/go-loop [entry   (first entries)
                                       others  (rest entries)
                                       results []]
                         (if entry
                           (let [items       (async/<! (scan entry))
                                 new-results (into results items)]
                             (recur (first others) (rest others) new-results))
                           (filesDropped results))))))}))
(defn put-to-presigned-url
  "Upload the given `file` to `presigned-url` via HTTP PUT. The Content-Type
   header is taken from the file's own MIME type, falling back to
   application/octet-stream when the browser reports none. Calls
   `(complete resp)` on an HTTP 2xx response, and `(failed resp-or-error)` on a
   non-2xx response or a network-level failure."
  [presigned-url ^js file complete failed]
  ;; BUG FIX: the original reader-discarded (#_#_) the `headers` binding but
  ;; still referenced `headers` in the fetch options, which is an unresolved
  ;; symbol. Bind it for real, derived from the file's MIME type.
  ;; NOTE: (.-type file) is "" for unknown types; not-empty maps "" -> nil so
  ;; the fallback applies.
  (let [content-type (or (not-empty (.-type file)) "application/octet-stream")
        headers      (js/Headers. #js {"Content-Type" content-type})]
    (-> (js/fetch presigned-url #js {:headers     headers
                                     :method      "PUT"
                                     :credentials "omit"
                                     :body        file})
        (.then (fn [^js resp]
                 ;; fetch only rejects on network errors; HTTP failures must be
                 ;; detected via the `ok` flag.
                 (if (.-ok resp)
                   (complete resp)
                   (failed resp))))
        (.catch #(failed %)))))
(defsc Upload
  "A simple normalizing component for dealing with file transfers by SHA"
  [this props]
  ;; Headless component: it renders nothing and exists only so df/load! can
  ;; normalize file-store responses into app state under [:file-store/sha sha].
  {:ident :file-store/sha
   :query [:file-store/sha
           :file-store/over-quota?
           :file-store/signed-url
           :file-store/exists?]})
(defn upload!
  "Ask the file store for a pre-signed URL to which we can upload the given item (which is a js :file and
   computed :sha combo).

   The server MUST have an API resolver that can accept a `:file-store/sha` as INPUT, and return
   `{:file-store/exists? boolean? :file-store/signed-url url}`. The signed URL MUST NOT be returned if the
   object exists as that can create a security hole where someone can overwrite
   content of an already present (and immutable) object in the store.

   If the object does not already exist, then this function will trigger a PUT to the S3 store in the correct
   location (the signed url) and will call `(on-success sha resp)` or `(on-fail sha resp)` based on the final result.

   If the SHA was already present, then this function immediately calls `(on-success sha {})`."
  [app-ish {:keys [^js file sha] :as item} on-success on-fail]
  (df/load! app-ish [:file-store/sha sha] Upload
    {:params {:filename (.-name file)}
     ;; After the load completes, read the normalized file-store entry directly
     ;; out of the state atom and decide whether a PUT is needed.
     :post-action (fn [{:keys [state]}]
                    (let [{:file-store/keys [signed-url over-quota? exists?]} (get-in @state [:file-store/sha sha])]
                      (cond
                        ;; Already stored: content-addressed objects are immutable,
                        ;; so there is nothing to upload.
                        exists? (on-success sha {})
                        over-quota? (on-fail sha {:error :quota})
                        :else (put-to-presigned-url signed-url file
                                (fn [resp] (on-success sha resp))
                                (fn [resp] (on-fail sha resp))))))
     ;; Network/remote error during the load itself (before any PUT happens).
     :error-action (fn [{:keys [state]}] (on-fail sha {:error :network}))}))
(defn drop-processor
  "A generalized helper for processing the files from a drag-and-drop operation. This function returns a function that
   can be used as the `filesDropped` option to the drop-listener-props.

   It will attempt to upload the files one at a time, allowing you to expand details about each and post-process the
   final result.

   The processing chain is:

   ```
   For each item:
       |
       v
     item -> expand -> ignore? -no-> upload? --yes-> upload w/on-success/on-fail -> mark uploaded
   (post-process env resulting-items)
   ```

   `env` - Anything you want. This is just forwarded to the processing functions internally, and can have things like
           atoms or lookup maps for doing computations/expansions/recording.
   `expand` - Optional. Default is to not add any keys to the item.
           A `(fn [env {:keys [name sha path file]}] (chan map?))` that returns an async channel that will contain
           a map. That map will be merged with the item for future steps in the chain. You can use this to do things
           like add content-type or other namespaced keys that relate to information about the file.
   `upload?` - Optional. The default is to upload all items.
           A core.async function that returns a channel that will eventually have a boolean in it.
           `(fn [env {:keys [name sha path file]}] (chan boolean?))`. If you return true on an item, then an
           attempt will be made to upload that file before post-processing (you will still see the
           file during post-processing).
   `ignore?` - Optional. A core async function. `(fn [env item] (chan boolean?))`.
           If provided then a true value in the returned channel indicates that
           the given item should be completely ignored and not flow through the processing chain nor appear at the output.
   `on-success` - Optional. A `(fn [env item])` that is called when a given file has been uploaded and expanded. This can be used to
           track progress, and will be called as each file is processed. This will be called on files that
           are not uploaded, but are just expanded.
   `on-fail` - Optional. A `(fn [env item])` that is called when an ATTEMPT to upload a given file failed. Any upload
           failure stops the remainder of the processing (no call to post-process, no more uploads).
   `abort?` - Optional. A `(fn [] boolean?)`. Called at each step. If it returns `true`, then the pipeline is aborted and no further items are processed.
           This also means `post-process` will NOT be called.
   `post-process` - Optional. `(fn [items])`.
           If supplied, this function will receive a sequence of the (already) uploaded and expanded
           items. Each item will contain at least the `{:keys [sha name file path uploaded?]}`, where `uploaded?` is
           a boolean indicator for if the file was successfully uploaded (will be false if your upload predicate
           prevented the upload). If the `expand` step added any data, then those keys will be in the resulting items
           as well.

   If none of the configuration options are supplied, then all files will be uploaded, but nothing else will happen."
  [app-ish env {:keys [upload? ignore? expand on-success abort? on-fail post-process]}]
  (fn [items]
    (let [gathered-items (atom [])
          ;; Set to true by the upload failure callback; checked at the top of each
          ;; iteration so a failure stops any FURTHER items (the failing item's
          ;; iteration has already passed its own stop? check).
          upload-failed? (atom false)]
      (async/go-loop [items items]
        (if-let [{:keys [path] :as item} (first items)]
          (let [expanded-data (when expand (async/<! (expand env item)))
                item (merge item expanded-data)
                ignored? (if (ifn? ignore?)
                           (async/<! (ignore? env item))
                           false)
                ;; Abort when a previous upload failed OR the user's abort? hook
                ;; returns truthy.
                stop? (or
                        @upload-failed?
                        (if (fn? abort?) (abort?) false))
                doUpload? (if (ifn? upload?)
                            (async/<! (upload? env item))
                            true)
                ;; Rendezvous channel used to turn upload!'s callbacks back into
                ;; a blocking <! so items are uploaded strictly one at a time.
                upload-result-channel (async/chan)]
            (log/debug path "expanded" expanded-data "ignore?" ignored? "upload?" doUpload? "stop?" stop?)
            ;; When stop? is true we simply fall out of the loop here, which also
            ;; skips post-process (intentional: aborted pipelines are not finalized).
            (when-not stop?
              (if ignored?
                (recur (rest items))
                (do
                  (if doUpload?
                    (do
                      (upload! app-ish item
                        (fn [_ _]
                          (when on-success (on-success env item))
                          (async/go
                            (async/>! upload-result-channel :success)))
                        (fn [_ _]
                          (reset! upload-failed? true)
                          ;; NOTE(review): on-fail's return value is ignored here;
                          ;; any failure aborts the remaining pipeline regardless.
                          (when on-fail (on-fail env item))
                          (async/go (async/>! upload-result-channel :failed))))
                      ;; Park until the upload callbacks deliver an outcome.
                      (let [result (async/<! upload-result-channel)]
                        (swap! gathered-items conj (assoc item :uploaded? (= :success result)))))
                    ;; upload? said no: still record the item, marked not uploaded.
                    (swap! gathered-items conj (assoc item :uploaded? false)))
                  (recur (rest items))))))
          ;; All items consumed: run post-process unless the pipeline was aborted.
          (let [stop? (or @upload-failed? (if (fn? abort?) (abort?) false))]
            (when (and (not stop?) post-process)
              (post-process @gathered-items))))))))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment