~ghost08/ratt

ratt: Allowing ratt be used as a library v1 APPLIED

~mariusor
~mariusor: 2
 Making the ratt functionality work as a library
 Update install command in the documentation

 7 files changed, 210 insertions(+), 209 deletions(-)
Export patchset (mbox)
How do I use this?

Copy & paste the following snippet into your terminal to import this patchset into git:

curl -s https://lists.sr.ht/~ghost08/ratt/patches/23733/mbox | git am -3
Learn more about email & git
View this thread in the archives

[PATCH ratt 1/2] Making the ratt functionality work as a library Export this patch

~mariusor
From: Marius Orcsik <marius@federated.id>

---
 cmd/ratt/main.go |  55 +++++++++++++++
 confs.go         |  55 +++++++++++----
 feed.go          | 121 +++++++++++++++++++++++++++-----
 http_settings.go |   4 +-
 lua.go           |   6 +-
 main.go          | 176 -----------------------------------------------
 6 files changed, 209 insertions(+), 208 deletions(-)
 create mode 100644 cmd/ratt/main.go
 delete mode 100644 main.go

diff --git a/cmd/ratt/main.go b/cmd/ratt/main.go
new file mode 100644
index 0000000..7cb30e6
--- /dev/null
+++ b/cmd/ratt/main.go
@@ -0,0 +1,55 @@
package main

import (
	"log"
	"net/url"

	"git.sr.ht/~ghost08/ratt"
	"github.com/alecthomas/kong"
)

// CLI describes ratt's command line interface, parsed by kong.
//
// Three sub-commands are exposed:
//   - extract: build a feed from a site using explicitly passed selectors
//   - save:    persist a set of selectors as a config file for a URL regex
//   - auto:    look up matching selectors in the known config locations
var CLI struct {
	Extract struct {
		Selectors  ratt.Selectors `embed`
		OutputType string         `short:"t" enum:"rss,atom,json" default:"rss" help:"output type (rss/atom/json)"`
		URL        *url.URL       `xor:"1" arg name:"url" help:"Site url."`
	} `cmd help:"extract feed from site using specified selectors"`
	Save struct {
		Selectors ratt.Selectors `embed`
		UrlRegex  string         `required arg help:"regex of the website url for the config file"`
		Path      string         `arg type:"path" help:"config file path"`
	} `cmd help:"save selectors to a config file"`
	Auto struct {
		OutputType string   `short:"t" enum:"rss,atom,json" default:"rss" help:"output type (rss/atom/json)"`
		URL        *url.URL `xor:"1" arg name:"url" help:"Site url."`
	} `cmd help:"automatically will try to find config for the website url (searches internal config files, in current directory and in ~/.config/ratt/*.conf)"`
	// Global flags, valid for every sub-command.
	DataURLEncode string `optional name:"data-urlencode" help:"adds new url-encoded values to url"`
	Verbose       bool   `optional default:"false" short:"v" help:"will print out the full http requests and responses"`
}

// main parses the command line with kong and dispatches to the ratt library.
func main() {
	// Include file:line in log output so fatal errors are traceable.
	log.SetFlags(log.Lshortfile | log.LstdFlags)
	ctx := kong.Parse(&CLI,
		kong.Name("ratt"),
		kong.Description("RSS all the things!\nA small html to rss/atom/json feed generator"),
		kong.UsageOnError(),
		kong.ConfigureHelp(kong.HelpOptions{
			Compact: true,
			Summary: true,
		}))

	// Dispatch on the kong command pattern that matched.
	switch ctx.Command() {
	case "extract <url>":
		// HTTP settings come from the flags embedded in the selectors and
		// must be published globally before ratt.Extract runs.
		ratt.GlobalHTTPSettings = &CLI.Extract.Selectors.HTTPSettings
		ratt.Extract(CLI.Extract.URL, CLI.Extract.Selectors, CLI.Extract.OutputType, CLI.Verbose, CLI.DataURLEncode)
	case "save <url-regex> <path>":
		ratt.Save(CLI.Save.Path, CLI.Save.UrlRegex, CLI.Save.Selectors)
	case "auto <url>":
		// Look up selectors matching the URL in the embedded/local/home configs.
		selectors, err := ratt.FindSelectors(CLI.Auto.URL.String(), CLI.Verbose)
		if err != nil {
			log.Fatal(err)
		}
		ratt.GlobalHTTPSettings = &selectors.HTTPSettings
		ratt.Extract(CLI.Auto.URL, selectors, CLI.Auto.OutputType, CLI.Verbose, CLI.DataURLEncode)
	}
}
diff --git a/confs.go b/confs.go
index 3758a61..cc64ba3 100644
--- a/confs.go
+++ b/confs.go
@@ -1,6 +1,7 @@
package main
package ratt

import (
	"embed"
	"fmt"
	"io/fs"
	"log"
@@ -12,13 +13,43 @@ import (
	"gopkg.in/yaml.v2"
)

type conf struct {
//go:embed confs/*
var confs embed.FS

// SortEnum names the supported item sort orders; see the Sort* constants.
type SortEnum string

// Selectors describes how to scrape a site into a feed: CSS selectors for
// the feed header and for each item, plus pagination and sorting settings.
// The struct doubles as a kong flag set (`embed`, `name`, `help` tags) and
// as the yaml schema of the config files.
type Selectors struct {
	// NOTE(review): the tag below is `yaml: "httpsettings"` with a space after
	// the colon, which reflect.StructTag does not parse; yaml.v2 would fall
	// back to its default (lowercased field name) key — confirm intended.
	HTTPSettings HTTPSettings `yaml: "httpsettings" embed`
	// Feed holds the selectors for the feed-level metadata.
	Feed         struct {
		Title       string `required name:"feed-title" help:"css selector for the feed title"`
		Description string `optional name:"feed-description" help:"css selector for the feed description"`
		AuthorName  string `optional help:"css selector for the feed author name"`
		AuthorEmail string `optional help:"css selector for the feed author email"`
	} `yaml:"feed" embed`
	// Item holds the selectors applied to every item container match.
	Item struct {
		Container     string `required name:"item-container" help:"css selector for the item container"`
		Title         string `required name:"item-title" help:"css selector for the item title"`
		Link          string `required name:"item-link" help:"css selector for the item link"`
		LinkAttr      string `default:"href" name:"item-link-attr" help:"get attribute value of the item link element"`
		Created       string `required name:"item-created" help:"css selector for the item created time"`
		CreatedFormat string `required name:"item-created-format" help:"css selector for the item created time format"`
		Description   string `name:"item-description" help:"css selector for the item description"`
		Image         string `name:"item-image" help:"css selector for the item image"`
		ImageAttr     string `name:"item-image-attr" default:"src" help:"get attribute value of the item image element"`
	} `yaml:"item" embed`
	// Pagination: how to find the next page and how many pages to follow.
	NextPage      string   `optional help:"css selector for the link to the next page to be scraped"`
	NextPageAttr  string   `optional default:"href" help:"get attribute value of the next page element"`
	NextPageCount int      `optional help:"how deep to follow the next page link (integer value)"`
	Sort          SortEnum `optional enum:",REVERSE,CREATED_ASD,CREATED_DESC,TITLE_ASD,TITLE_DESC" help:"after all items are downloaded, ratt will sort them by the provided attribute"`
}

// Conf is the on-disk config file schema: a URL regex paired with the
// Selectors to use for sites whose URL matches it.
type Conf struct {
	Regex string
	Selectors
}

func save(filepath, regex string, selectors Selectors) {
	conf := conf{
func Save(filepath, regex string, selectors Selectors) {
	conf := Conf{
		Regex:     regex,
		Selectors: selectors,
	}
@@ -32,16 +63,16 @@ func save(filepath, regex string, selectors Selectors) {
	}
}

func findSelectors(url string) (Selectors, error) {
func FindSelectors(url string, verbose bool) (Selectors, error) {
	confs, err := fs.Sub(confs, "confs")
	if err != nil {
		return Selectors{}, err
	}
	selectors, err := findSelectorsInDir(url, "embededConfs", confs)
	selectors, err := findSelectorsInDir(url, "embededConfs", confs, verbose)
	if err == nil {
		return selectors, nil
	}
	selectors, err = findSelectorsInDir(url, "currentDir/", os.DirFS("."))
	selectors, err = findSelectorsInDir(url, "currentDir/", os.DirFS("."), verbose)
	if err == nil {
		return selectors, nil
	}
@@ -50,7 +81,7 @@ func findSelectors(url string) (Selectors, error) {
		return Selectors{}, err
	}
	homeConf := filepath.Join(home, ".config", "ratt")
	selectors, err = findSelectorsInDir(url, homeConf, os.DirFS(homeConf))
	selectors, err = findSelectorsInDir(url, homeConf, os.DirFS(homeConf), verbose)
	if err != nil {
		if os.IsNotExist(err) {
			return Selectors{}, fmt.Errorf("no conf found for %s", url)
@@ -60,7 +91,7 @@ func findSelectors(url string) (Selectors, error) {
	return selectors, nil
}

func findSelectorsInDir(url, dirname string, dir fs.FS) (Selectors, error) {
func findSelectorsInDir(url, dirname string, dir fs.FS, verbose bool) (Selectors, error) {
	dirs, err := fs.ReadDir(dir, ".")
	if err != nil {
		return Selectors{}, err
@@ -76,7 +107,7 @@ func findSelectorsInDir(url, dirname string, dir fs.FS) (Selectors, error) {
		if err != nil {
			return Selectors{}, err
		}
		var conf conf
		var conf Conf
		if err := yaml.NewDecoder(f).Decode(&conf); err != nil {
			return Selectors{}, err
		}
@@ -84,13 +115,13 @@ func findSelectorsInDir(url, dirname string, dir fs.FS) (Selectors, error) {
		if err != nil {
			return Selectors{}, err
		}
		if CLI.Verbose {
		if verbose {
			log.Printf("checking regex: %s/%s", dirname, d.Name())
		}
		if !re.MatchString(url) {
			continue
		}
		if CLI.Verbose {
		if verbose {
			log.Printf("using config: %s/%s", dirname, d.Name())
		}
		return conf.Selectors, nil
diff --git a/feed.go b/feed.go
index 51a4484..e6bee0f 100644
--- a/feed.go
+++ b/feed.go
@@ -1,4 +1,4 @@
package main
package ratt

import (
	"fmt"
@@ -7,6 +7,7 @@ import (
	"net/http/httputil"
	"net/url"
	"os"
	"sort"
	"strconv"
	"strings"
	"sync"
@@ -17,7 +18,7 @@ import (
	lua "github.com/yuin/gopher-lua"
)

func getDocFromUrl(link *url.URL) (*goquery.Document, error) {
func getDocFromUrl(link *url.URL, verbose bool) (*goquery.Document, error) {
	client := GlobalHTTPSettings.Client()
	req, err := http.NewRequest("GET", link.String(), nil)
	if err != nil {
@@ -27,7 +28,7 @@ func getDocFromUrl(link *url.URL) (*goquery.Document, error) {
	if err != nil {
		return nil, fmt.Errorf("requesting site data: %w", err)
	}
	if CLI.Verbose {
	if verbose {
		d, _ := httputil.DumpRequest(req, true)
		log.Println(string(d))
		d, _ = httputil.DumpResponse(resp, true)
@@ -45,27 +46,31 @@ func getDocFromUrl(link *url.URL) (*goquery.Document, error) {
	return doc, nil
}

func constructFeed(url *url.URL, selectors Selectors) (feed *feeds.Feed, err error) {
	doc, err := getDocFromUrl(url)
func ConstructFeedFromURL(url *url.URL, selectors Selectors, verbose bool) (feed *feeds.Feed, err error) {
	doc, err := getDocFromUrl(url, verbose)
	if err != nil {
		return nil, err
	}
	return ConstructFeed(doc, url.String(), selectors, verbose)
}

func ConstructFeed(doc *goquery.Document, u string, selectors Selectors, verbose bool) (feed *feeds.Feed, err error) {
	feed = &feeds.Feed{
		Link:    &feeds.Link{Href: url.String()},
		Link:    &feeds.Link{Href: u},
		Created: time.Now(),
	}
	feed.Title, err = getSelData(doc.Selection, selectors.Feed.Title, url.String())
	feed.Title, err = getSelData(doc.Selection, selectors.Feed.Title, u)
	if err != nil {
		return nil, fmt.Errorf("feed title: %w", err)
	}
	if selectors.Feed.Description != "" {
		feed.Description, err = getSelData(doc.Selection, selectors.Feed.Description, url.String())
		feed.Description, err = getSelData(doc.Selection, selectors.Feed.Description, u)
		if err != nil {
			return nil, fmt.Errorf("feed description: %w", err)
		}
	}
	if selectors.Feed.AuthorName != "" {
		authorName, err := getSelData(doc.Selection, selectors.Feed.AuthorName, url.String())
		authorName, err := getSelData(doc.Selection, selectors.Feed.AuthorName, u)
		if err != nil {
			return nil, fmt.Errorf("feed author name: %w", err)
		}
@@ -75,7 +80,7 @@ func constructFeed(url *url.URL, selectors Selectors) (feed *feeds.Feed, err err
		feed.Author.Name = authorName
	}
	if selectors.Feed.AuthorEmail != "" {
		authorEmail, err := getSelData(doc.Selection, selectors.Feed.AuthorEmail, url.String())
		authorEmail, err := getSelData(doc.Selection, selectors.Feed.AuthorEmail, u)
		if err != nil {
			return nil, fmt.Errorf("feed author email: %w", err)
		}
@@ -88,7 +93,7 @@ func constructFeed(url *url.URL, selectors Selectors) (feed *feeds.Feed, err err
	var wa sync.WaitGroup
	//if item container selector is a multiline value, then it is lua code that returns a number
	if strings.ContainsRune(selectors.Item.Container, '\n') {
		output, err := runLua(nil, selectors.Item.Container, url.String())
		output, err := runLua(nil, selectors.Item.Container, u)
		if err != nil {
			return nil, fmt.Errorf("item container lua code: %w", err)
		}
@@ -102,7 +107,7 @@ func constructFeed(url *url.URL, selectors Selectors) (feed *feeds.Feed, err err
			i := i
			go func() {
				defer wa.Done()
				it, itemErr := getItem(nil, selectors, url.String(), addGlobal("index", lua.LNumber(i)))
				it, itemErr := getItem(nil, selectors, u, addGlobal("index", lua.LNumber(i)))
				if itemErr != nil {
					err = itemErr
					return
@@ -120,7 +125,7 @@ func constructFeed(url *url.URL, selectors Selectors) (feed *feeds.Feed, err err
				wa.Add(1)
				go func() {
					defer wa.Done()
					i, itemErr := getItem(item, selectors, url.String(), addGlobal("index", lua.LNumber(index)))
					i, itemErr := getItem(item, selectors, u, addGlobal("index", lua.LNumber(index)))
					if itemErr != nil {
						err = itemErr
						return
@@ -146,12 +151,12 @@ func constructFeed(url *url.URL, selectors Selectors) (feed *feeds.Feed, err err

	var nextLink string
	if selectors.NextPageAttr != "" {
		nextLink, err = getSelAttr(doc.Selection, selectors.NextPage, selectors.NextPageAttr, url.String())
		nextLink, err = getSelAttr(doc.Selection, selectors.NextPage, selectors.NextPageAttr, u)
		if err != nil {
			return nil, fmt.Errorf("getting next page link from attr: %w", err)
		}
	} else {
		nextLink, err = getSelData(doc.Selection, selectors.NextPage, url.String())
		nextLink, err = getSelData(doc.Selection, selectors.NextPage, u)
		if err != nil {
			return nil, fmt.Errorf("getting next page link: %w", err)
		}
@@ -161,7 +166,7 @@ func constructFeed(url *url.URL, selectors Selectors) (feed *feeds.Feed, err err
		return nil, fmt.Errorf("parsing next page link: %w", err)
	}
	selectors.NextPageCount -= 1
	nextFeed, err := constructFeed(nextURL, selectors)
	nextFeed, err := ConstructFeedFromURL(nextURL, selectors, verbose)
	if err != nil {
		return nil, fmt.Errorf("constructing feed from (%s): %w", nextLink, err)
	}
@@ -261,3 +266,87 @@ func dumpSelectionData(sel *goquery.Selection) {
	data, _ := goquery.OuterHtml(sel)
	fmt.Fprintf(os.Stderr, "\nSelection data: %s\n", data)
}

func addQuery(u *url.URL, data string) {
	if data == "" {
		return
	}
	if strings.Contains(data, "=") {
		for _, kv := range strings.Split(data, "&") {
			d := strings.Split(kv, "=")
			k, v := d[0], d[1]
			q := u.Query()
			q.Set(k, v)
			u.RawQuery = q.Encode()
		}
		return
	}
	u.Path += strings.ReplaceAll(data, " ", "+")
}

// Extract builds a feed from link using selectors and writes it, serialized
// as outputType ("rss", "atom" or "json"), to standard output. encode, when
// non-empty, is merged into the URL as url-encoded query data. Any failure
// is fatal: the process exits via log.Fatal.
func Extract(link *url.URL, selectors Selectors, outputType string, verbose bool, encode string) {
	addQuery(link, encode)

	feed, err := ConstructFeedFromURL(link, selectors, verbose)
	if err != nil {
		log.Fatal(err)
	}

	// An empty Sort means "keep scrape order"; anything else reorders items.
	if selectors.Sort != "" {
		if sortErr := sortItems(feed, selectors.Sort); sortErr != nil {
			log.Fatal(sortErr)
		}
	}

	// Serialize; outputType is constrained by the CLI enum, so one of the
	// cases always matches.
	var data string
	switch outputType {
	case "rss":
		data, err = feed.ToRss()
	case "atom":
		data, err = feed.ToAtom()
	case "json":
		data, err = feed.ToJSON()
	}
	if err != nil {
		log.Fatalln("exporting data:", err)
	}
	fmt.Fprintf(os.Stdout, "%s", data)
}

// Supported values for Selectors.Sort.
const (
	SortDontSort    SortEnum = "" // leave items in scrape order
	SortReverse     SortEnum = "REVERSE"
	SortCreatedASD  SortEnum = "CREATED_ASD"
	SortCreatedDESC SortEnum = "CREATED_DESC"
	SortTitleASD    SortEnum = "TITLE_ASD"
	SortTitleDESC   SortEnum = "TITLE_DESC"
)

// sortItems reorders feed.Items in place according to s: SortReverse flips
// the current order, the CREATED/TITLE variants sort by creation time or
// title, ascending (ASD) or descending (DESC). Any other value (including
// SortDontSort) leaves the items untouched. The error is currently always
// nil.
func sortItems(feed *feeds.Feed, s SortEnum) error {
	items := feed.Items
	switch s {
	case SortReverse:
		// Swap symmetric pairs, walking both ends toward the middle.
		for i, j := 0, len(items)-1; i < j; i, j = i+1, j-1 {
			items[i], items[j] = items[j], items[i]
		}
	case SortCreatedASD:
		sort.Sort(ByCreated(items))
	case SortCreatedDESC:
		sort.Sort(sort.Reverse(ByCreated(items)))
	case SortTitleASD:
		sort.Sort(ByTitle(items))
	case SortTitleDESC:
		sort.Sort(sort.Reverse(ByTitle(items)))
	}
	return nil
}

// ByCreated implements sort.Interface, ordering feed items by ascending
// creation time.
type ByCreated []*feeds.Item

func (a ByCreated) Len() int           { return len(a) }
func (a ByCreated) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
func (a ByCreated) Less(i, j int) bool { return a[i].Created.Before(a[j].Created) }

// ByTitle implements sort.Interface, ordering feed items lexicographically
// by ascending title.
type ByTitle []*feeds.Item

func (a ByTitle) Len() int      { return len(a) }
func (a ByTitle) Swap(i, j int) { a[i], a[j] = a[j], a[i] }

// The native < operator is equivalent to strings.Compare(...) == -1 and is
// what the strings.Compare documentation itself recommends here.
func (a ByTitle) Less(i, j int) bool { return a[i].Title < a[j].Title }
diff --git a/http_settings.go b/http_settings.go
index 6f54a62..5351288 100644
--- a/http_settings.go
+++ b/http_settings.go
@@ -1,4 +1,4 @@
package main
package ratt

import (
	"crypto/tls"
@@ -12,6 +12,8 @@ type HTTPSettings struct {
	Insecure  bool              `optional short:"k" default:"false" help:"(TLS) By default, every SSL connection ratt makes is verified to be secure. This option allows ratt to proceed and operate even for server connections otherwise considered insecure"`
}

var GlobalHTTPSettings *HTTPSettings

func (s *HTTPSettings) Client() *http.Client {
	client := &http.Client{
		Transport: &rattTransport{
diff --git a/lua.go b/lua.go
index 149f140..68dbd58 100644
--- a/lua.go
+++ b/lua.go
@@ -1,13 +1,13 @@
package main
package ratt

import (
	"fmt"
	"strings"

	"github.com/PuerkitoBio/goquery"
	lua "github.com/yuin/gopher-lua"
	gojqlua "git.sr.ht/~ghost08/ratt/gojq-lua"
	goquerylua "git.sr.ht/~ghost08/ratt/goquery-lua"
	"github.com/PuerkitoBio/goquery"
	lua "github.com/yuin/gopher-lua"
)

func runLua(sel *goquery.Selection, code string, url string, luaOptions ...luaOption) (string, error) {
diff --git a/main.go b/main.go
deleted file mode 100644
index d2704e3..0000000
--- a/main.go
@@ -1,176 +0,0 @@
package main

import (
	"embed"
	"fmt"
	"log"
	"net/url"
	"os"
	"sort"
	"strings"

	"github.com/alecthomas/kong"
	"github.com/gorilla/feeds"
)

//go:embed confs/*
var confs embed.FS

var CLI struct {
	Extract struct {
		Selectors  Selectors `embed`
		OutputType string    `short:"t" enum:"rss,atom,json" default:"rss" help:"output type (rss/atom/json)"`
		URL        *url.URL  `xor:"1" arg name:"url" help:"Site url."`
	} `cmd help:"extract feed from side using specified selectors"`
	Save struct {
		Selectors Selectors `embed`
		UrlRegex  string    `required arg help:"regex of the website url for the config file"`
		Path      string    `arg type:"path" help:"config file path"`
	} `cmd help:"save selectors to a config file"`
	Auto struct {
		OutputType string   `short:"t" enum:"rss,atom,json" default:"rss" help:"output type (rss/atom/json)"`
		URL        *url.URL `xor:"1" arg name:"url" help:"Site url."`
	} `cmd help:"automatically will try to find config for the website url (searches internal config files, in current directory and in ~/.config/ratt/*.conf)"`
	DataURLEncode string `optional name:"data-urlencode" help:"adds new url-encoded values to url"`
	Verbose       bool   `optional default:"false" short:"v" help:"will print out the full http requests and responses"`
}

type Selectors struct {
	HTTPSettings HTTPSettings `yaml: "httpsettings" embed`
	Feed         struct {
		Title       string `required name:"feed-title" help:"css selector for the feed title"`
		Description string `optional name:"feed-description" help:"css selector for the feed description"`
		AuthorName  string `optional help:"css selector for the feed author name"`
		AuthorEmail string `optional help:"css selector for the feed author email"`
	} `yaml:"feed" embed`
	Item struct {
		Container     string `required name:"item-container" help:"css selector for the item container"`
		Title         string `required name:"item-title" help:"css selector for the item title"`
		Link          string `required name:"item-link" help:"css selector for the item link"`
		LinkAttr      string `default:"href" name:"item-link-attr" help:"get attribute value of the item link element"`
		Created       string `required name:"item-created" help:"css selector for the item created time"`
		CreatedFormat string `required name:"item-created-format" help:"css selector for the item created time format"`
		Description   string `name:"item-description" help:"css selector for the item description"`
		Image         string `name:"item-image" help:"css selector for the item image"`
		ImageAttr     string `name:"item-image-attr" default:"src" help:"get attribute value of the item image element"`
	} `yaml:"item" embed`
	NextPage      string   `optional help:"css selector for the link to the next page to be scraped"`
	NextPageAttr  string   `optional default:"href" help:"get attribute value of the next page element"`
	NextPageCount int      `optional help:"how deep to follow the next page link (integer value)"`
	Sort          SortEnum `optional enum:",REVERSE,CREATED_ASD,CREATED_DESC,TITLE_ASD,TITLE_DESC" help:"after all items are downloaded, ratt will sort them by the provided attribute"`
}

type SortEnum string

const (
	SortDontSort    SortEnum = ""
	SortReverse     SortEnum = "REVERSE"
	SortCreatedASD  SortEnum = "CREATED_ASD"
	SortCreatedDESC SortEnum = "CREATED_DESC"
	SortTitleASD    SortEnum = "TITLE_ASD"
	SortTitleDESC   SortEnum = "TITLE_DESC"
)

var GlobalHTTPSettings *HTTPSettings

func main() {
	log.SetFlags(log.Lshortfile | log.LstdFlags)
	ctx := kong.Parse(&CLI,
		kong.Name("ratt"),
		kong.Description("RSS all the things!\nA small html to rss/atom/json feed generator"),
		kong.UsageOnError(),
		kong.ConfigureHelp(kong.HelpOptions{
			Compact: true,
			Summary: true,
		}))

	switch ctx.Command() {
	case "extract <url>":
		GlobalHTTPSettings = &CLI.Extract.Selectors.HTTPSettings
		extract(CLI.Extract.URL, CLI.Extract.Selectors, CLI.Extract.OutputType)
	case "save <url-regex> <path>":
		save(CLI.Save.Path, CLI.Save.UrlRegex, CLI.Save.Selectors)
	case "auto <url>":
		selectors, err := findSelectors(CLI.Auto.URL.String())
		if err != nil {
			log.Fatal(err)
		}
		GlobalHTTPSettings = &selectors.HTTPSettings
		extract(CLI.Auto.URL, selectors, CLI.Auto.OutputType)
	}
}

func addQuery(u *url.URL, data string) {
	if data == "" {
		return
	}
	if strings.Contains(data, "=") {
		for _, kv := range strings.Split(data, "&") {
			d := strings.Split(kv, "=")
			k, v := d[0], d[1]
			q := u.Query()
			q.Set(k, v)
			u.RawQuery = q.Encode()
		}
		return
	}
	u.Path += strings.ReplaceAll(data, " ", "+")
}

func extract(link *url.URL, selectors Selectors, outputType string) {
	addQuery(link, CLI.DataURLEncode)
	feed, err := constructFeed(link, selectors)
	if err != nil {
		log.Fatal(err)
	}
	if selectors.Sort != "" {
		if err := sortItems(feed, selectors.Sort); err != nil {
			log.Fatal(err)
		}
	}

	var data string
	switch outputType {
	case "rss":
		data, err = feed.ToRss()
	case "atom":
		data, err = feed.ToAtom()
	case "json":
		data, err = feed.ToJSON()
	}
	if err != nil {
		log.Fatalln("exporting data:", err)
	}
	fmt.Fprintf(os.Stdout, "%s", data)
}

func sortItems(feed *feeds.Feed, s SortEnum) error {
	switch s {
	case SortReverse:
		for i := 0; i < len(feed.Items)/2; i++ {
			j := len(feed.Items) - i - 1
			feed.Items[i], feed.Items[j] = feed.Items[j], feed.Items[i]
		}
	case SortCreatedASD:
		sort.Sort(ByCreated(feed.Items))
	case SortCreatedDESC:
		sort.Sort(sort.Reverse(ByCreated(feed.Items)))
	case SortTitleASD:
		sort.Sort(ByTitle(feed.Items))
	case SortTitleDESC:
		sort.Sort(sort.Reverse(ByTitle(feed.Items)))
	}
	return nil
}

type ByCreated []*feeds.Item

func (a ByCreated) Len() int           { return len(a) }
func (a ByCreated) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
func (a ByCreated) Less(i, j int) bool { return a[i].Created.Before(a[j].Created) }

type ByTitle []*feeds.Item

func (a ByTitle) Len() int           { return len(a) }
func (a ByTitle) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
func (a ByTitle) Less(i, j int) bool { return strings.Compare(a[i].Title, a[j].Title) == -1 }
-- 
2.30.2

[PATCH ratt 2/2] Update install command in the documentation Export this patch

~mariusor
From: Marius Orcsik <marius@habarnam.ro>

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index fdbcb9a..48d0472 100644
--- a/README.md
+++ b/README.md
@@ -44,7 +44,7 @@ Config files are yaml files. ratt has some confs embedded. When calling eg: `rat

Install latest with go:

`go get git.sr.ht/~ghost08/ratt@latest`
`go install git.sr.ht/~ghost08/ratt/cmd/ratt@latest`

Install on Arch Linux from AUR with your favorite helper:

-- 
2.30.2