cpod

Yet another cron-friendly podcatcher

git clone https://git.8pit.net/cpod.git

  1// Copyright (C) 2013-2015 Sören Tempel
  2//
  3// This program is free software: you can redistribute it and/or modify
  4// it under the terms of the GNU General Public License as published by
  5// the Free Software Foundation, either version 3 of the License, or
  6// (at your option) any later version.
  7//
  8// This program is distributed in the hope that it will be useful,
  9// but WITHOUT ANY WARRANTY; without even the implied warranty of
 10// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 11// GNU General Public License for more details.
 12//
 13// You should have received a copy of the GNU General Public License
 14// along with this program. If not, see <http://www.gnu.org/licenses/>.
 15
 16package store
 17
 18import (
 19	"bufio"
 20	"github.com/nmeum/cpod/util"
 21	"github.com/nmeum/go-feedparser"
 22	"os"
 23)
 24
// Podcast represents a Podcast loaded from the store.
//
// NOTE: field order matters — Fetch constructs this type with a
// positional composite literal.
type Podcast struct {
	// URL to the feed.
	URL string

	// Feed itself (zero value if parsing failed).
	Feed feedparser.Feed

	// Error if parsing failed, nil otherwise.
	Error error
}
 36
// Store represents a storage backend. It keeps the feed URLs in
// memory and persists them to a plain text file, one URL per line.
type Store struct {
	// path describes the URL file location on disk.
	path string

	// urls contains all URLs which are part of the URL file.
	urls []string
}
 45
 46// Load returns and creates a new store with the URL file located
 47// at the give filepath.
 48func Load(path string) (s *Store, err error) {
 49	s = new(Store)
 50	s.path = path
 51
 52	file, err := os.Open(path)
 53	if err != nil {
 54		return
 55	}
 56
 57	defer file.Close()
 58	scanner := bufio.NewScanner(file)
 59
 60	for scanner.Scan() {
 61		s.urls = append(s.urls, scanner.Text())
 62	}
 63
 64	err = scanner.Err()
 65	return
 66}
 67
 68// Add appends a new URL to the store. It doesn't check if the
 69// given data is a valid URL and it doesn't check if the URL
 70// is already a part of the store either.
 71func (s *Store) Add(url string) {
 72	s.urls = append(s.urls, url)
 73}
 74
 75// Contains returns true if the url is already a part of the
 76// store. If it isn't it returns false.
 77func (s *Store) Contains(url string) bool {
 78	for _, u := range s.urls {
 79		if u == url {
 80			return true
 81		}
 82	}
 83
 84	return false
 85}
 86
 87// Fetch fetches all feeds form the urls and returns a channel
 88// which contains all podcasts.
 89func (s *Store) Fetch() <-chan Podcast {
 90	out := make(chan Podcast)
 91	go func() {
 92		for _, url := range s.urls {
 93			resp, err := util.Get(url)
 94			if err != nil {
 95				continue
 96			}
 97
 98			reader := resp.Body
 99			defer reader.Close()
100
101			f, err := feedparser.Parse(reader)
102			out <- Podcast{url, f, err}
103		}
104
105		close(out)
106	}()
107
108	return out
109}
110
111// Save writes the URL file to the store path.
112func (s *Store) Save() error {
113	file, err := os.OpenFile(s.path, os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644)
114	if err != nil {
115		return err
116	}
117
118	defer file.Close()
119	for _, url := range s.urls {
120		if _, err := file.WriteString(url + "\n"); err != nil {
121			return err
122		}
123	}
124
125	return nil
126}