Readme - How to migrate from Tilda to Cloudflare...

  1. fill env variables and secrets (the CLOUDFLARE_API_TOKEN, CLOUDFLARE_ACCOUNT_ID and CLOUDFLARE_PROJECT_NAME secrets, plus the SITE_URL repository variable)

  2. copy the Tilda export archive to src/tilda.zip (a quick local sanity check is sketched right after this list)

  3. commit

  4. push
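
Before committing, it can be worth checking locally that the archive is readable and sits where the workflow expects it. A minimal stand-alone sketch (not part of the gist itself), assuming the export has been copied to src/tilda.zip:

package main

import (
	"archive/zip"
	"log"
)

func main() {
	// Open the export at the path the workflow's Build step expects and list
	// the first few entries to confirm the zip is valid before committing it.
	zf, err := zip.OpenReader("src/tilda.zip")
	if err != nil {
		log.Fatalf("cannot open src/tilda.zip: %v", err)
	}
	defer zf.Close()
	for i, f := range zf.File {
		if i >= 10 {
			break
		}
		log.Printf("%03d %s", i, f.Name)
	}
}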

name: Deploy tilda site to CloudFlare

on:
  push:
    branches: ["main"]
  workflow_dispatch:

# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
  contents: read
  pages: write
  id-token: write

concurrency:
  group: "cloudflare"
  cancel-in-progress: false

env:
  BUILD_PATH: "." # default value when not using subfolders
  ASSET_DIRECTORY: dist
  CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
  ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
  PROJECT_NAME: ${{ secrets.CLOUDFLARE_PROJECT_NAME }}
  SITE: ${{ vars.SITE_URL }}

jobs:
  build:
    name: build
    runs-on: ubuntu-latest
    container:
      image: golang:latest
      volumes:
        - source:/app
      options: --cpus 1
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Build
        run: |
          go run process.go --zip src/tilda.zip --out dist
      - name: upload
        uses: actions/upload-artifact@v3
        with:
          name: tilda
          path: dist
          retention-days: 1

  deploy:
    name: deploy
    runs-on: ubuntu-latest
    needs: build
    steps:
      - name: download
        uses: actions/download-artifact@v3
        with:
          name: tilda
      - name: Publish to Cloudflare Pages
        uses: cloudflare/pages-action@v1
        with:
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          accountId: ${{ env.ACCOUNT_ID }}
          projectName: ${{ env.PROJECT_NAME }}
          directory: . # ${{ env.ASSET_DIRECTORY }}
          branch: main
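
The deploy job publishes whatever the build job uploaded, so it can help to preview the converted output locally before relying on the Pages upload. A minimal stand-alone sketch (a hypothetical preview helper, separate from process.go below), assuming the converter has already written its output to ./dist, the directory the workflow uploads as the artifact:

package main

import (
	"log"
	"net/http"
)

func main() {
	// Serve the converted site from ./dist so the result can be checked in a
	// browser before the workflow pushes it to Cloudflare Pages.
	fileServer := http.FileServer(http.Dir("dist"))
	log.Println("previewing ./dist at http://localhost:8080")
	log.Fatal(http.ListenAndServe(":8080", fileServer))
}
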
package main

import (
	"archive/zip"
	"bufio"
	"bytes"
	"flag"
	"fmt"
	"io"
	"io/fs"
	"io/ioutil"
	"log"
	"os"
	"path/filepath"
	"regexp"
	"strings"
)

var (
	rewriteRe = regexp.MustCompile(`^RewriteRule \^(.+)\$ (.*.html) \[NC\]$`)
	errorRe   = regexp.MustCompile(`^ErrorDocument ([0-9]+) /(.*)$`)
	indexRe   = regexp.MustCompile(`^DirectoryIndex (.*)$`)
)

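// For reference, the kind of .htaccess lines these patterns target and the
// mapping htaccessContent derives from them ("<root>" stands for the
// top-level directory inside the export; the sample paths are illustrative,
// not taken from a real archive):
//
//	RewriteRule ^about/$ about.html [NC]  ->  rewrites["<root>/about.html"] = "about"
//	ErrorDocument 404 /404.html           ->  pages["<root>/404.html"] = "404.html"
//	DirectoryIndex index.html             ->  pages["<root>/index.html"] = "index.html"
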
type fileReaderF func(filename string) (fs.File, error)

type myCloser interface {
	Close() error
}

func check(e error) {
	if e != nil {
		panic(e)
	}
}

// copy duplicates sourceFile into destinationFile, creating parent
// directories as needed.
func copy(sourceFile string, destinationFile string) error {
	dst := filepath.Dir(destinationFile)
	if err := os.MkdirAll(dst, 0700); err != nil {
		return err
	}
	input, err := ioutil.ReadFile(sourceFile)
	if err != nil {
		return err
	}
	return ioutil.WriteFile(destinationFile, input, 0644)
}

// save reads sourceFile through the supplied reader, rewrites absolute links
// to the original domain into site-relative ones, and writes the result to
// destinationFile.
func save(reader fileReaderF, sourceFile string, destinationFile string) error {
	dst := filepath.Dir(destinationFile)
	if err := os.MkdirAll(dst, 0700); err != nil {
		return err
	}
	f, err := reader(sourceFile)
	if err != nil {
		return fmt.Errorf("ERROR! %s", err)
	}
	defer f.Close()
	buf := bytes.NewBuffer(nil)
	if _, err := io.Copy(buf, f); err != nil {
		return err
	}
	res := strings.ReplaceAll(buf.String(), "https://clics.store/", "/")
	return ioutil.WriteFile(destinationFile, []byte(res), 0644)
}

func closeFile(f myCloser) {
	err := f.Close()
	check(err)
}

var (
	tildaZip  = flag.String("zip", "", "path to the Tilda export zip archive")
	tildaDir  = flag.String("src", "", "directory with an unpacked Tilda export")
	outputDir = flag.String("out", "./.output", "output dir")
)

func init() {
	flag.Parse()
}

// readerZip returns a fileReaderF that opens files inside the given zip archive.
func readerZip(zf *zip.ReadCloser) (fileReaderF, error) {
	openFile := func(filename string) (fs.File, error) {
		return zf.Open(filename)
	}
	return openFile, nil
}

// readerFS returns a fileReaderF that opens files relative to root on disk.
func readerFS(root string) (fileReaderF, error) {
	openFile := func(filename string) (fs.File, error) {
		return os.Open(filepath.Join(root, filename))
	}
	return openFile, nil
}

// lsZip lists the archive entries: the first entry (the export's root
// directory) is kept so it can later serve as the path prefix, every other
// directory entry is skipped.
func lsZip(zf *zip.ReadCloser) ([]string, error) {
	idx := 0
	files := []string{}
	for _, file := range zf.File {
		fileName := file.Name
		if idx == 0 {
			// keep the root directory entry
		} else if fileName[len(fileName)-1] == '/' {
			continue
		}
		files = append(files, fileName)
		idx++
	}
	return files, nil
}

// ls walks an unpacked export on disk and returns the same kind of listing as
// lsZip: the root entry first, then every regular file, paths relative to src.
func ls(src string) ([]string, error) {
	visitor := func(root string, files chan string) (fs.WalkDirFunc, error) {
		idx := 0
		fn := func(path string, di fs.DirEntry, err error) error {
			if err != nil {
				return err
			}
			p := path[len(root):]
			if p == "" {
				return nil
			} else if idx == 0 {
				files <- p
			} else if di.IsDir() {
				return nil
			} else {
				files <- p
			}
			idx++
			return nil
		}
		return fn, nil
	}

	files := []string{}
	filesChan := make(chan string)
	done := make(chan struct{})
	fn, _ := visitor(src, filesChan)
	go func() {
		for path := range filesChan {
			files = append(files, path)
		}
		close(done)
	}()
	err := filepath.WalkDir(src, fn)
	close(filesChan) // stop the collector goroutine
	<-done           // wait for it, so files is complete before it is returned
	if err != nil {
		return nil, err
	}
	return files, nil
}

// htaccessContent parses the export's htaccess file and returns two maps:
// pages (source file -> flat destination file name) and rewrites
// (source file -> pretty-URL directory).
func htaccessContent(reader fileReaderF, prefix string, file string) (map[string]string, map[string]string, error) {
	pages := map[string]string{}
	rewrites := map[string]string{}

	fullpath := filepath.Join(prefix, file)
	htaccess, err := reader(fullpath)
	if err != nil {
		log.Fatalf("read htaccess error: %v", err)
	}
	defer htaccess.Close()

	fileScanner := bufio.NewScanner(htaccess)
	fileScanner.Split(bufio.ScanLines)
	for fileScanner.Scan() {
		line := fileScanner.Text()
		log.Printf("%s", line)
		if matches := rewriteRe.FindAllStringSubmatch(line, -1); matches != nil {
			dst := matches[0][1]
			src := matches[0][2]
			if dst[len(dst)-1] == '/' {
				dst = dst[:len(dst)-1] // drop the trailing slash of the pretty URL
			}
			rewrites[filepath.Join(prefix, src)] = dst
		} else if matches := errorRe.FindAllStringSubmatch(line, -1); matches != nil {
			dst := fmt.Sprintf("%s.html", matches[0][1]) // e.g. 404 -> 404.html
			src := matches[0][2]
			pages[filepath.Join(prefix, src)] = dst
		} else if matches := indexRe.FindAllStringSubmatch(line, -1); matches != nil {
			dst := "index.html"
			src := matches[0][1]
			pages[filepath.Join(prefix, src)] = dst
		} else {
			continue
		}
	}
	return pages, rewrites, nil
}

// extractData classifies every entry of the export: pages and rewrites come
// from htaccess, everything else is copied as-is with the root directory
// stripped from its path.
func extractData(reader fileReaderF, prefix string, files []string) (map[string]string, map[string]string, map[string]string, error) {
	ignore := map[string]string{
		filepath.Join(prefix, "htaccess"):   "",
		filepath.Join(prefix, "readme.txt"): "",
	}
	pages, rewrites, _ := htaccessContent(reader, prefix, "htaccess")

	other := map[string]string{}
	for _, src := range files {
		_, iOk := ignore[src]
		_, pOk := pages[src]
		_, rOk := rewrites[src]
		if pOk || rOk || iOk {
			continue
		}
		pp := strings.Split(src, "/")
		dst := src[len(fmt.Sprintf("%s/", pp[0])):] // strip the leading "<root>/" component
		other[src] = dst
	}
	return pages, rewrites, other, nil
}

// repackContent writes the three groups into output: pages keep their flat
// names, rewrites become <pretty-url>/index.html, everything else is copied
// unchanged.
func repackContent(reader fileReaderF, output string, pages map[string]string, rewrites map[string]string, other map[string]string) error {
	log.Printf("pages:")
	for srcFile, dstFile := range pages {
		log.Printf("%s -> %s", srcFile, dstFile)
		dst := filepath.Join(output, dstFile)
		if err := save(reader, srcFile, dst); err != nil {
			log.Printf("[ERROR] save file error: %v", err)
		}
	}
	log.Printf("rewrites:")
	for srcFile, dstFile := range rewrites {
		log.Printf("%s -> %s/", srcFile, dstFile)
		dst := filepath.Join(output, dstFile, "index.html")
		if err := save(reader, srcFile, dst); err != nil {
			log.Printf("[ERROR] save file error: %v", err)
		}
	}
	log.Printf("other:")
	for srcFile, dstFile := range other {
		log.Printf("%s -> %s", srcFile, dstFile)
		dst := filepath.Join(output, dstFile)
		if err := save(reader, srcFile, dst); err != nil {
			log.Printf("[ERROR] save file error: %v", err)
		}
	}
	return nil
}

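// The resulting layout under --out looks roughly like this (the asset folder
// names are illustrative; only the index/404 names follow from the htaccess
// rules above):
//
//	dist/
//	  index.html            <- DirectoryIndex page
//	  404.html              <- ErrorDocument page
//	  about/index.html      <- a RewriteRule pretty URL
//	  css/..., js/...       <- "other" files, root directory stripped
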
func main() {
	var reader fileReaderF
	files := []string{}
	if *tildaZip != "" {
		zf, err := zip.OpenReader(*tildaZip)
		check(err)
		defer closeFile(zf)
		files, _ = lsZip(zf)
		reader, _ = readerZip(zf)
	} else if *tildaDir != "" {
		files, _ = ls(*tildaDir)
		reader, _ = readerFS(*tildaDir)
	} else {
		log.Fatal("--src or --zip required")
	}
	if len(files) == 0 {
		log.Fatal("empty export: nothing to process")
	}
	for idx, f := range files {
		log.Printf("%03d %s", idx, f)
	}
	// The first entry is the export's root directory; it becomes the prefix
	// stripped from every destination path.
	prefix := files[0]
	files = files[1:]
	pages, rewrites, other, _ := extractData(reader, prefix, files)
	repackContent(reader, *outputDir, pages, rewrites, other)
}
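
// Usage (the first form matches the Build step in the workflow above; the
// unpacked-directory path in the second form is a placeholder):
//
//	go run process.go --zip src/tilda.zip --out dist
//	go run process.go --src <unpacked-export-dir> --out dist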