--- /dev/null
+// Attempt at high level client library for AniDB's APIs
+package anidb
+
+import (
+ "time"
+)
+
// AniDB is the main client handle; all API queries are methods on it.
// The zero value is not usable — construct one with NewAniDB.
type AniDB struct {
	Timeout time.Duration // Timeout for the various calls (default: 45s)

	udp *udpWrap // wraps the low-level UDP API session
}

// NewAniDB initialises a new AniDB with the default 45 second timeout
// and a fresh (not yet authenticated) UDP session.
func NewAniDB() *AniDB {
	return &AniDB{
		Timeout: 45 * time.Second,
		udp:     newUDPWrap(),
	}
}
--- /dev/null
+package anidb
+
+import (
+ "github.com/Kovensky/go-anidb/misc"
+ "strconv"
+ "time"
+)
+
+// See the constants list for the valid values.
// AnimeType is the production/distribution category of an anime.
// See the constants list for the valid values.
type AnimeType string

const (
	AnimeTypeTVSeries   = AnimeType("TV Series")  // Anime was a regular TV broadcast series
	AnimeTypeTVSpecial  = AnimeType("TV Special") // Anime was broadcast on TV as a special
	AnimeTypeMovie      = AnimeType("Movie")      // Anime was a feature film
	AnimeTypeOVA        = AnimeType("OVA")        // Anime was released direct-to-video
	AnimeTypeWeb        = AnimeType("Web")        // Anime was released through online streaming or downloads
	AnimeTypeMusicVideo = AnimeType("Music Video")
)
+
// Rating is an aggregate vote score together with the number of votes
// that produced it.
type Rating struct {
	Rating    float32 // average score
	VoteCount int     // number of votes aggregated
}

// Resource is a list of URLs (or URL fragments formatted elsewhere)
// pointing at a single third-party service.
type Resource []string
+
// Resources holds links to third party websites for a given anime.
// Each field is a list of URLs on the corresponding service.
type Resources struct {
	AniDB,
	ANN,
	MyAnimeList,
	AnimeNfo,
	OfficialJapanese,
	OfficialEnglish,
	WikipediaEnglish,
	WikipediaJapanese,
	SyoboiSchedule,
	AllCinema,
	Anison,
	VNDB,
	MaruMegane Resource
}

// UniqueTitleMap maps a language to the single title in that language.
type UniqueTitleMap map[Language]string

// TitleMap maps a language to all known titles in that language.
type TitleMap map[Language][]string
+
// Anime is the merged view of an anime's data from the HTTP and UDP APIs.
type Anime struct {
	AID AID  // The Anime ID.
	R18 bool // Whether this anime is considered porn.

	Type          AnimeType    // Production/distribution type.
	TotalEpisodes int          // Total number of regular episodes.
	EpisodeCount  EpisodeCount // Known numbers of the various types of episodes.

	StartDate time.Time // Date of first episode release, if available.
	EndDate   time.Time // Date of last episode release, if available.

	PrimaryTitle   string         // The primary title in the database; almost always a romanization of the Japanese title.
	OfficialTitles UniqueTitleMap // The official title for each language.
	ShortTitles    TitleMap       // Shortcut titles used for searches
	Synonyms       TitleMap       // Synonyms for each language, or unofficial titles

	OfficialURL string // URL for original official website.
	Picture     string // URL for the page picture on AniDB.

	Description string

	Votes          Rating // Votes from people who watched the whole thing.
	TemporaryVotes Rating // Votes from people who are still watching this.
	Reviews        Rating // Votes from reviewers.

	Episodes Episodes // List of episodes.

	Awards    []string
	Resources Resources

	Incomplete bool      // Set if only the UDP API part of the query failed
	Updated    time.Time // When the data was last modified in the server
	Cached     time.Time // When the data was retrieved from the server
}

// EpisodeCount tallies how many episodes of each type are known.
type EpisodeCount struct {
	RegularCount int
	SpecialCount int
	CreditsCount int
	OtherCount   int
	TrailerCount int
	ParodyCount  int
}
+
// Convenience method that runs AnimeByID on the result of
// SearchAnime (exact title search).
func (adb *AniDB) AnimeByName(name string) <-chan *Anime {
	return adb.AnimeByID(SearchAnime(name))
}

// Convenience method that runs AnimeByID on the result of
// SearchAnimeFold (case-folded title search).
func (adb *AniDB) AnimeByNameFold(name string) <-chan *Anime {
	return adb.AnimeByID(SearchAnimeFold(name))
}
+
+// Searches for the given Episode in this Anime's Episodes list
+// and returns the match.
+//
+// Returns nil if there is no match.
+func (a *Anime) Episode(ep *misc.Episode) *Episode {
+ for i, e := range a.Episodes {
+ if ep.ContainsEpisodes(&e.Episode) {
+ return &a.Episodes[i]
+ }
+ }
+ return nil
+}
+
// Convenience method that parses the string into an Episode
// before doing the Episode search.
func (a *Anime) EpisodeByString(name string) *Episode {
	return a.Episode(misc.ParseEpisode(name))
}

// Convenience method that parses the int into an Episode
// before doing the Episode search.
//
// Only works with regular (i.e. not special, etc) episodes.
func (a *Anime) EpisodeByNumber(number int) *Episode {
	return a.EpisodeByString(strconv.Itoa(number))
}
--- /dev/null
+package anidb
+
+import (
+ "encoding/gob"
+ "fmt"
+ "github.com/Kovensky/go-anidb/http"
+ "github.com/Kovensky/go-anidb/misc"
+ "github.com/Kovensky/go-anidb/udp"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+)
+
// Register the concrete type so *Anime values survive the gob
// round-trip used by DumpCaches/LoadCaches.
func init() {
	gob.RegisterName("*github.com/Kovensky/go-anidb.Anime", &Anime{})
}

// Touch updates the last-retrieved timestamp (Cacheable interface).
func (a *Anime) Touch() {
	a.Cached = time.Now()
}
+
+func (a *Anime) IsStale() bool {
+ if a == nil {
+ return true
+ }
+ return time.Now().Sub(a.Cached) > AnimeCacheDuration
+}
+
// Unique Anime IDentifier.
type AID int

// Returns a cached Anime. Returns nil if there is no cached Anime with this AID.
func (aid AID) Anime() *Anime {
	// The type assertion's ", _" form makes a miss (or a stored nil)
	// come back as a plain nil *Anime instead of panicking.
	a, _ := caches.Get(animeCache).Get(int(aid)).(*Anime)
	return a
}

// httpAnimeResponse pairs an HTTP API reply with its transport error,
// so both can travel over a single channel.
type httpAnimeResponse struct {
	anime httpapi.Anime
	err   error
}
+
// Retrieves an Anime from the cache if possible. If it isn't cached,
// or if the cache is stale, queries both the UDP and HTTP APIs
// for data.
//
// The returned channel receives a single *Anime (possibly nil) and is
// then closed.
//
// Note: This can take at least 4 seconds during heavy traffic.
func (adb *AniDB) AnimeByID(aid AID) <-chan *Anime {
	ch := make(chan *Anime, 1)

	// Fast path: fresh cache hit.
	anime := aid.Anime()
	if !anime.IsStale() {
		ch <- anime
		close(ch)
		return ch
	}

	// Register intent; if another goroutine is already querying this
	// AID, just wait for its broadcast instead of querying again.
	ac := caches.Get(animeCache)
	ic := make(chan Cacheable, 1)
	go func() { ch <- (<-ic).(*Anime); close(ch) }()
	if ac.Intent(int(aid), ic) {
		return ch
	}

	go func() {
		// Fire the HTTP and UDP queries concurrently; each select
		// iteration below consumes one of the two replies.
		httpChan := make(chan httpAnimeResponse, 1)
		go func() {
			a, err := httpapi.GetAnime(int(aid))
			httpChan <- httpAnimeResponse{anime: a, err: err}
		}()
		udpChan := adb.udp.SendRecv("ANIME",
			paramMap{
				"aid":   aid,
				"amask": animeAMask,
			})

		timeout := time.After(adb.Timeout)

		if anime == nil {
			anime = &Anime{AID: aid}
		}
		anime.Incomplete = true

		ok := true

		// Setting a channel to nil after its reply arrives makes the
		// select skip it on the next iteration.
		// NOTE(review): the timeout case does not break the loop, so a
		// second iteration can still block on the remaining channel —
		// confirm this is intended.
	Loop:
		for i := 0; i < 2; i++ {
			select {
			case <-timeout:
				ok = false
			case resp := <-httpChan:
				if resp.err != nil {
					ok = false
					break Loop
				}
				if a := anime.populateFromHTTP(resp.anime); a == nil {
					ok = false
					break Loop
				} else {
					anime = a
				}
				httpChan = nil
			case reply := <-udpChan:
				anime.Incomplete = !anime.populateFromUDP(reply)
				udpChan = nil
			}
		}
		// Set caches and refreshes the timestamp; Flush only notifies
		// the waiting intents, so partial data isn't cached as fresh.
		if anime.PrimaryTitle != "" {
			if ok {
				ac.Set(int(aid), anime)
			} else {
				ac.Flush(int(aid), anime)
			}
		} else {
			ac.Set(int(aid), (*Anime)(nil))
		}
	}()
	return ch
}
+
// populateFromHTTP merges an HTTP API reply into the receiver.
//
// Returns nil if the reply carries an error, otherwise the mutated
// receiver. Panics if the reply is for a different AID, or if a second
// "main" title shows up (assumed impossible).
func (a *Anime) populateFromHTTP(reply httpapi.Anime) *Anime {
	if reply.Error != "" {
		return (*Anime)(nil)
	}

	if a.AID != AID(reply.ID) {
		panic(fmt.Sprintf("Requested AID %d different from received AID %d", a.AID, reply.ID))
	}
	a.R18 = reply.R18

	a.Type = AnimeType(reply.Type)
	// skip episode count since it's unreliable; UDP API handles that

	// UDP API has more precise versions; only take the HTTP dates when
	// the UDP half of the query hasn't succeeded.
	if a.Incomplete {
		if st, err := time.Parse(httpapi.DateFormat, reply.StartDate); err == nil {
			a.StartDate = st
		}
		if et, err := time.Parse(httpapi.DateFormat, reply.EndDate); err == nil {
			a.EndDate = et
		}
	}

	// Sort the title list into primary/official/short/synonym buckets.
	for _, title := range reply.Titles {
		switch title.Type {
		case "main":
			if a.PrimaryTitle != "" {
				// We assume there's only ever one "main" title
				panic(
					fmt.Sprintf("PrimaryTitle %q already set, new PrimaryTitle %q received!",
						a.PrimaryTitle, title.Title))
			}
			a.PrimaryTitle = title.Title
		case "official":
			if a.OfficialTitles == nil {
				a.OfficialTitles = make(UniqueTitleMap)
			}
			a.OfficialTitles[Language(title.Lang)] = title.Title
		case "short":
			if a.ShortTitles == nil {
				a.ShortTitles = make(TitleMap)
			}
			a.ShortTitles[Language(title.Lang)] = append(a.ShortTitles[Language(title.Lang)], title.Title)
		case "synonym":
			if a.Synonyms == nil {
				a.Synonyms = make(TitleMap)
			}
			a.Synonyms[Language(title.Lang)] = append(a.Synonyms[Language(title.Lang)], title.Title)
		}
	}

	a.OfficialURL = reply.URL
	if reply.Picture != "" {
		a.Picture = httpapi.AniDBImageBaseURL + reply.Picture
	}

	a.Description = reply.Description

	a.Votes = Rating{
		Rating:    reply.Ratings.Permanent.Rating,
		VoteCount: reply.Ratings.Permanent.Count,
	}
	a.TemporaryVotes = Rating{
		Rating:    reply.Ratings.Temporary.Rating,
		VoteCount: reply.Ratings.Temporary.Count,
	}
	a.Reviews = Rating{
		Rating:    reply.Ratings.Review.Rating,
		VoteCount: reply.Ratings.Review.Count,
	}

	a.populateResources(reply.Resources)

	// Convert the episode list, caching each episode individually and
	// tallying per-type counts as we go.
	counts := map[misc.EpisodeType]int{}

	sort.Sort(reply.Episodes)
	for _, ep := range reply.Episodes {
		ad, _ := time.Parse(httpapi.DateFormat, ep.AirDate)

		titles := make(UniqueTitleMap)
		for _, title := range ep.Titles {
			titles[Language(title.Lang)] = title.Title
		}

		e := Episode{
			EID: EID(ep.ID),
			AID: a.AID,

			Episode: *misc.ParseEpisode(ep.EpNo.EpNo),

			Length:  time.Duration(ep.Length) * time.Minute,
			AirDate: &ad,

			Rating: Rating{
				Rating:    ep.Rating.Rating,
				VoteCount: ep.Rating.Votes,
			},
			Titles: titles,
		}
		counts[e.Type]++
		cacheEpisode(&e)

		a.Episodes = append(a.Episodes, e)
	}

	a.EpisodeCount = EpisodeCount{
		RegularCount: counts[misc.EpisodeTypeRegular],
		SpecialCount: counts[misc.EpisodeTypeSpecial],
		CreditsCount: counts[misc.EpisodeTypeCredits],
		OtherCount:   counts[misc.EpisodeTypeOther],
		TrailerCount: counts[misc.EpisodeTypeTrailer],
		ParodyCount:  counts[misc.EpisodeTypeParody],
	}

	// Without UDP data, fall back to the regular-episode tally as the
	// total — but only for shows that have already finished airing.
	if a.Incomplete {
		if !a.EndDate.IsZero() {
			a.TotalEpisodes = a.EpisodeCount.RegularCount
		}
	}

	return a
}
+
// populateResources converts the HTTP API resource list into the
// Resources struct, formatting service-specific identifiers into URLs.
//
// The numeric Type cases follow the AniDB HTTP API resource type codes.
func (a *Anime) populateResources(list []httpapi.Resource) {
	a.Resources.AniDB = Resource{fmt.Sprintf("http://anidb.net/a%v", a.AID)}

	for _, res := range list {
		// Flatten each entity's identifier list into a []interface{}
		// so it can be splatted into fmt.Sprintf below.
		args := make([][]interface{}, len(res.ExternalEntity))
		for i, e := range res.ExternalEntity {
			args[i] = make([]interface{}, len(e.Identifiers))
			for j := range args[i] {
				args[i][j] = e.Identifiers[j]
			}
		}

		switch res.Type {
		case 1: // ANN
			for i := range res.ExternalEntity {
				a.Resources.ANN =
					append(a.Resources.ANN, fmt.Sprintf(httpapi.ANNFormat, args[i]...))
			}
		case 2: // MyAnimeList
			for i := range res.ExternalEntity {
				a.Resources.MyAnimeList =
					append(a.Resources.MyAnimeList, fmt.Sprintf(httpapi.MyAnimeListFormat, args[i]...))
			}
		case 3: // AnimeNfo
			for i := range res.ExternalEntity {
				a.Resources.AnimeNfo =
					append(a.Resources.AnimeNfo, fmt.Sprintf(httpapi.AnimeNfoFormat, args[i]...))
			}
		case 4: // OfficialJapanese — full URLs, no formatting needed
			for _, e := range res.ExternalEntity {
				for _, url := range e.URL {
					a.Resources.OfficialJapanese = append(a.Resources.OfficialJapanese, url)
				}
			}
		case 5: // OfficialEnglish — full URLs, no formatting needed
			for _, e := range res.ExternalEntity {
				for _, url := range e.URL {
					a.Resources.OfficialEnglish = append(a.Resources.OfficialEnglish, url)
				}
			}
		case 6: // WikipediaEnglish
			for i := range res.ExternalEntity {
				a.Resources.WikipediaEnglish =
					append(a.Resources.WikipediaEnglish, fmt.Sprintf(httpapi.WikiEnglishFormat, args[i]...))
			}
		case 7: // WikipediaJapanese
			for i := range res.ExternalEntity {
				a.Resources.WikipediaJapanese =
					append(a.Resources.WikipediaJapanese, fmt.Sprintf(httpapi.WikiJapaneseFormat, args[i]...))
			}
		case 8: // SyoboiSchedule
			for i := range res.ExternalEntity {
				a.Resources.SyoboiSchedule =
					append(a.Resources.SyoboiSchedule, fmt.Sprintf(httpapi.SyoboiFormat, args[i]...))
			}
		case 9: // AllCinema
			for i := range res.ExternalEntity {
				a.Resources.AllCinema =
					append(a.Resources.AllCinema, fmt.Sprintf(httpapi.AllCinemaFormat, args[i]...))
			}
		case 10: // Anison
			for i := range res.ExternalEntity {
				a.Resources.Anison =
					append(a.Resources.Anison, fmt.Sprintf(httpapi.AnisonFormat, args[i]...))
			}
		case 14: // VNDB
			for i := range res.ExternalEntity {
				a.Resources.VNDB =
					append(a.Resources.VNDB, fmt.Sprintf(httpapi.VNDBFormat, args[i]...))
			}
		case 15: // MaruMegane
			for i := range res.ExternalEntity {
				a.Resources.MaruMegane =
					append(a.Resources.MaruMegane, fmt.Sprintf(httpapi.MaruMeganeFormat, args[i]...))
			}
		}
	}
}
+
// http://wiki.anidb.info/w/UDP_API_Definition#ANIME:_Retrieve_Anime_Data
// Everything that we can't easily get through the HTTP API, or that has more accuracy:
// episodes, air date, end date, award list, update date,
const animeAMask = "0000980201"

// populateFromUDP merges a UDP ANIME reply (selected by animeAMask)
// into the receiver. Returns true on success, false when the reply is
// nil or carries an error.
func (a *Anime) populateFromUDP(reply udpapi.APIReply) bool {
	if reply != nil && reply.Error() == nil {
		// Fields come '|'-separated on the second reply line, in the
		// bit order of animeAMask.
		parts := strings.Split(reply.Lines()[1], "|")

		// Parse every field as an int; non-numeric fields just yield 0.
		ints := make([]int64, len(parts))
		for i, p := range parts {
			ints[i], _ = strconv.ParseInt(p, 10, 32)
		}

		a.TotalEpisodes = int(ints[0])      // episodes
		st := time.Unix(ints[1], 0)         // air date
		et := time.Unix(ints[2], 0)         // end date
		aw := strings.Split(parts[3], "'")  // award list ('-separated)
		ut := time.Unix(ints[4], 0)         // update date

		if len(parts[3]) > 0 {
			a.Awards = aw
		}

		// 0 does not actually mean the Epoch here...
		if ints[1] != 0 {
			a.StartDate = st
		}
		if ints[2] != 0 {
			a.EndDate = et
		}
		if ints[4] != 0 {
			a.Updated = ut
		}
		return true
	}
	return false
}
--- /dev/null
+package anidb
+
+import (
+ "crypto/aes"
+ "crypto/cipher"
+ "crypto/rand"
+ "errors"
+ "io"
+ "runtime"
+)
+
// We still have the key and IV somewhere in memory...
// but it's better than plaintext.
type credentials struct {
	username []byte
	password []byte
	udpKey   []byte
}

// shred overwrites the encrypted credential buffers with random bytes
// and then drops the references. Safe to call on a nil receiver.
func (c *credentials) shred() {
	if c == nil {
		return
	}
	for _, buf := range [][]byte{c.username, c.password, c.udpKey} {
		io.ReadFull(rand.Reader, buf)
	}
	c.username, c.password, c.udpKey = nil, nil, nil
}
+
// Randomly generated on every execution
var aesKey []byte

// Generate the process-wide AES key; failing to gather entropy is fatal.
func init() {
	aesKey = make([]byte, aes.BlockSize)
	if _, err := io.ReadFull(rand.Reader, aesKey); err != nil {
		panic(err)
	}
}

// crypt encrypts the given string with AES-CTR under the process key.
// Output layout: [ random IV | CTR-encrypted payload ].
func crypt(plaintext string) []byte {
	block, err := aes.NewCipher(aesKey)
	if err != nil {
		panic(err)
	}

	data := []byte(plaintext)
	out := make([]byte, aes.BlockSize+len(data))

	iv := out[:aes.BlockSize]
	if _, err = io.ReadFull(rand.Reader, iv); err != nil {
		panic(err)
	}

	cipher.NewCTR(block, iv).XORKeyStream(out[aes.BlockSize:], data)
	return out
}

// decrypt reverses crypt. Inputs too short to contain an IV decode to
// the empty string.
func decrypt(ciphertext []byte) string {
	if len(ciphertext) <= aes.BlockSize {
		return ""
	}

	block, err := aes.NewCipher(aesKey)
	if err != nil {
		panic(err)
	}

	iv, payload := ciphertext[:aes.BlockSize], ciphertext[aes.BlockSize:]
	plain := make([]byte, len(payload))
	cipher.NewCTR(block, iv).XORKeyStream(plain, payload)
	return string(plain)
}
+
// newCredentials encrypts each credential with the per-process AES key
// and wraps them in a credentials struct.
func newCredentials(username, password, udpKey string) *credentials {
	return &credentials{
		username: crypt(username),
		password: crypt(password),
		udpKey:   crypt(udpKey),
	}
}
+
+// Re-authenticates the current user. Do not use unless really necessary.
+func (a *AniDB) ReAuth() error {
+ return a.udp.ReAuth()
+}
+
+func (udp *udpWrap) ReAuth() error {
+ if c := udp.credentials; c != nil {
+ defer runtime.GC() // any better way to clean the plaintexts?
+
+ udp.connected = true
+ return udp.Auth(
+ decrypt(c.username),
+ decrypt(c.password),
+ decrypt(c.udpKey))
+ }
+ return errors.New("No credentials stored")
+}
+
// Saves the used credentials in the AniDB struct, to allow automatic
// re-authentication when needed; they are (properly) encrypted with a key that's
// uniquely generated every time the module is initialized.
//
// Any previously stored credentials are shredded first.
func (a *AniDB) SetCredentials(username, password, udpKey string) {
	a.udp.credentials.shred()
	a.udp.credentials = newCredentials(username, password, udpKey)
}
+
// Authenticates to anidb's UDP API and, on success, stores the credentials using
// SetCredentials. If udpKey is not "", the communication with the server
// will be encrypted, but in the VERY weak ECB mode.
func (a *AniDB) Auth(username, password, udpKey string) (err error) {
	defer runtime.GC() // any better way to clean the plaintexts?

	if err = a.udp.Auth(username, password, udpKey); err == nil {
		a.udp.connected = true
		a.SetCredentials(username, password, udpKey)
	}
	return
}
+
// Logs the user out and removes the credentials from the AniDB struct.
//
// No-op (returns nil) when there is no active session.
func (a *AniDB) Logout() error {
	if a.udp.connected {
		a.udp.credentials.shred()
		a.udp.credentials = nil

		a.udp.connected = false
		return a.udp.Logout()
	}
	return nil
}
--- /dev/null
+package anidb
+
+import (
+ "bytes"
+ "compress/gzip"
+ "encoding/gob"
+ "errors"
+ "io"
+ "log"
+ "os"
+ "reflect"
+ "sync"
+)
+
+// Loads caches from the given path.
+func LoadCachesFromFile(f string) (err error) {
+ fh, err := os.Open(f)
+
+ if err != nil {
+ return err
+ }
+ defer fh.Close()
+ return LoadCaches(fh)
+}
+
// Cache file format version; a major mismatch rejects the file outright,
// a minor mismatch rejects it after partially decoding (see LoadCaches).
const cacheMajorVersion = 0
const cacheMinorVersion = 0

// cacheDataVer tags a piece of cache data with the minimum minor
// version at which it appears in the dump.
type cacheDataVer struct {
	ver  int
	data interface{}
}

// lockable is the subset of sync.Mutex/RWMutex used when dumping/loading.
type lockable interface {
	Lock()
	Unlock()
}

// rlockable additionally supports shared (read) locking.
type rlockable interface {
	lockable
	RLock()
	RUnlock()
}

// getLocks lists every package-level lock that guards dumped state.
func getLocks() []lockable {
	return []lockable{
		// caches is special-cased
		&eidAidLock,
		&ed2kFidLock,
		&banTimeLock,
		&titlesFileDataLock,
	}
}

// getCacheData lists the dumped data items, in dump order.
func getCacheData() []cacheDataVer {
	return []cacheDataVer{
		cacheDataVer{0, &titlesFileData},
		cacheDataVer{0, &caches.Caches},
		cacheDataVer{0, &eidAidMap},
		cacheDataVer{0, &ed2kFidMap},
		cacheDataVer{0, &banTime}}
}
+
// Loads caches from the given io.Reader.
//
// The stream must be the gzipped, versioned gob produced by DumpCaches.
func LoadCaches(r io.Reader) (err error) {
	defer func() { log.Println("Loaded with error", err) }()

	caches.LockAll() // no defer UnlockAll -- the mutexes get reset
	defer caches.m.Unlock() // but we need to unlock this
	for _, lock := range getLocks() {
		lock.Lock()
		defer lock.Unlock()
	}

	// make sure caches' mutexes are reset even on a decoding failure
	// (decoding overwrites the baseCache structs, including their
	// locked mutexes, so they are replaced with fresh ones here)
	defer func() {
		for _, cache := range caches.Caches {
			cache.m = sync.RWMutex{}
		}
	}()

	gz, err := gzip.NewReader(r)
	if err != nil {
		return err
	}

	dec := gob.NewDecoder(gz)
	version := 0

	if err = dec.Decode(&version); err != nil {
		return err
	}

	if version != cacheMajorVersion {
		return errors.New("Cache major version mismatch")
	}

	// Rebuild the transient (non-gob'd) state after decoding.
	defer func() {
		titlesDB.LoadDB(bytes.NewReader(titlesFileData))

		for _, cache := range caches.Caches {
			cache.intent = make(map[int][]chan Cacheable)
		}
	}()

	// First decoded item is the minor version; items tagged with a
	// newer minor version than the file's are skipped.
	version = 0
	for _, v := range append([]cacheDataVer{
		cacheDataVer{0, &version}}, getCacheData()...) {
		if v.ver > version {
			break
		}
		if err = dec.Decode(v.data); err != nil {
			return err
		}
	}

	if version != cacheMinorVersion {
		return errors.New("Cache minor version mismatch")
	}
	return nil
}
+
+// Saves caches to the given path.
+func DumpCachesToFile(f string) (err error) {
+ fh, err := os.Create(f)
+ if err != nil {
+ return err
+ }
+ defer fh.Close()
+ return DumpCaches(fh)
+}
+
// Saves caches to the given io.Writer.
//
// The cache is a gzipped, versioned gob of the various internal
// caches.
func DumpCaches(w io.Writer) (err error) {
	defer func() { log.Println("Dumped with error", err) }()

	// Take read locks where available so concurrent readers keep working.
	caches.RLockAll()
	defer caches.RUnlockAll()
	for _, lock := range getLocks() {
		if l, ok := lock.(rlockable); ok {
			l.RLock()
			defer l.RUnlock()
		} else {
			lock.Lock()
			defer lock.Unlock()
		}
	}

	gz, err := gzip.NewWriterLevel(w, gzip.BestCompression)
	if err != nil {
		return err
	}
	defer gz.Close()

	enc := gob.NewEncoder(gz)

	// Versions first, then the data items in getCacheData order —
	// mirroring the decode order in LoadCaches.
	for _, v := range append([]cacheDataVer{
		cacheDataVer{0, cacheMajorVersion},
		cacheDataVer{0, cacheMinorVersion},
	}, getCacheData()...) {
		if err = enc.Encode(v.data); err != nil {
			return err
		}
	}

	return nil
}
+
// Cacheable is implemented by every value stored in the caches.
type Cacheable interface {
	// Updates the last modified time
	Touch()
	// Returns true if the Cacheable is nil, or if the last modified time is too old.
	IsStale() bool
}

// caches is the package-wide registry of per-type caches.
var caches = initCacheMap()

// cacheMap groups the per-type caches behind a single lock.
type cacheMap struct {
	m      sync.RWMutex
	Caches map[cacheType]*baseCache
}

// cacheType selects one of the per-type caches.
type cacheType int

const (
	animeCache = cacheType(iota)
	episodeCache
	groupCache
	fileCache
)

// initCacheMap builds the registry with one empty cache per type.
func initCacheMap() *cacheMap {
	return &cacheMap{
		Caches: map[cacheType]*baseCache{
			animeCache:   newBaseCache(),
			episodeCache: newBaseCache(),
			groupCache:   newBaseCache(),
			fileCache:    newBaseCache(),
		},
	}
}

// Get returns the cache for the given type.
func (c *cacheMap) Get(typ cacheType) *baseCache {
	c.m.RLock()
	defer c.m.RUnlock()

	return c.Caches[typ]
}
+
// LockAll write-locks the registry and then every individual cache,
// in map-iteration order. Pair with UnlockAll (but see LoadCaches,
// which resets the inner mutexes instead).
func (c *cacheMap) LockAll() {
	c.m.Lock()

	for _, cache := range c.Caches {
		cache.m.Lock()
	}
}

// UnlockAll releases everything LockAll acquired.
func (c *cacheMap) UnlockAll() {
	c.m.Unlock()

	for _, cache := range c.Caches {
		cache.m.Unlock()
	}
}

// RLockAll read-locks the registry and every individual cache.
func (c *cacheMap) RLockAll() {
	c.m.RLock()

	for _, cache := range c.Caches {
		cache.m.RLock()
	}
}

// RUnlockAll releases everything RLockAll acquired.
func (c *cacheMap) RUnlockAll() {
	c.m.RUnlock()

	for _, cache := range c.Caches {
		cache.m.RUnlock()
	}
}
+
// baseCache is an int-keyed cache of Cacheables with an "intent" list
// of channels waiting for each pending key.
type baseCache struct {
	m      sync.RWMutex
	Cache  map[int]Cacheable
	intent map[int][]chan Cacheable // not gob'd; rebuilt by LoadCaches
}

// newBaseCache returns an empty, ready-to-use cache.
func newBaseCache() *baseCache {
	return &baseCache{
		Cache:  make(map[int]Cacheable),
		intent: make(map[int][]chan Cacheable),
	}
}

// Get returns the cached value for id, or nil if absent.
func (c *baseCache) Get(id int) Cacheable {
	c.m.RLock()
	defer c.m.RUnlock()

	return c.Cache[id]
}
+
// Sends the Cacheable to all channels that registered
// Intent and clears the Intent list.
func (c *baseCache) Flush(id int, v Cacheable) {
	c.m.Lock()
	defer c.m.Unlock()

	c._flush(id, v)
}

// _flush is Flush without locking; the caller must hold c.m.
func (c *baseCache) _flush(id int, v Cacheable) {
	for _, ch := range c.intent[id] {
		ch <- v
		close(ch)
	}
	delete(c.intent, id)
}

// Caches if v is not nil and then Flushes the Intents.
func (c *baseCache) Set(id int, v Cacheable) {
	c.m.Lock()
	defer c.m.Unlock()

	// NOTE(review): reflect is needed because a typed-nil pointer inside
	// the interface is not == nil; this assumes v is always a pointer —
	// reflect.Value.IsNil panics otherwise.
	if !reflect.ValueOf(v).IsNil() {
		v.Touch()
		c.Cache[id] = v
	}

	c._flush(id, v)
}

// Register the Intent to get the cache data for this id when
// it's available. Returns false if the caller was the first
// to register it.
func (c *baseCache) Intent(id int, ch chan Cacheable) (ok bool) {
	c.m.Lock()
	defer c.m.Unlock()

	list, ok := c.intent[id]
	c.intent[id] = append(list, ch)

	return ok
}
--- /dev/null
// +build never

package anidb

// Copy&paste this for new cache types
// globally replace: Strut strut SID sid
//
// NOTE: this file is excluded from every build (see the constraint
// above); it is a template, not compiled code, and is not expected to
// compile as-is.

import (
	"sync"
	"time"
)

// Strut is the placeholder cached type.
type Strut struct {
	Cached time.Time // when the data was retrieved from the server
}

func (v *Strut) touch() {
	v.Cached = time.Now()
}
func (v *Strut) isStale(d time.Duration) bool {
	return time.Now().Sub(v.Cached) > d
}

// SID is the placeholder unique identifier type.
type SID int

// Strut returns the cached value for this SID, if any.
func (sid SID) Strut() *Strut {
	return strutCache.Get(sid)
}

var StrutCacheDuration = DefaultCacheDuration

var strutCache = strutCacheStruct{baseCache: newBaseCache()}

// strutCacheStruct adds SID-typed accessors on top of baseCache.
type strutCacheStruct struct{ baseCache }

func (c *strutCacheStruct) Get(id SID) *Strut {
	return c.baseCache.Get(int(id)).(*Strut)
}

func (c *strutCacheStruct) Set(id SID, v *Strut) {
	c.baseCache.Set(int(id), v)
}

func (c *strutCacheStruct) Intent(id SID, ch chan *Strut) (ok bool) {
	ch2 := make(chan cacheable, 1)
	go func() { ch <- (<-ch2).(*Strut) }()
	return c.baseCache.Intent(int(id), ch2)
}

// StrutBySID is the template fetch method: cache hit, intent
// coalescing, then (in real code) the actual API query.
func (adb *AniDB) StrutBySID(id SID) <-chan *Strut {
	ch := make(chan *Strut, 1)
	if v := id.Strut(); !v.isStale(StrutCacheDuration) {
		ch <- v
		close(ch)
		return ch
	}

	if strutCache.Intent(id, ch) {
		return ch
	}

	go func() {
		var v *Strut
		strutCache.Set(id, v)
	}()
	return ch
}
--- /dev/null
+package anidb
+
+import (
+ "github.com/Kovensky/go-anidb/misc"
+ "time"
+)
+
// Episode is a single episode of an Anime.
type Episode struct {
	EID EID // The Episode ID.
	AID AID // The Anime ID this Episode belongs to.

	// Type, Number
	misc.Episode

	Length time.Duration // rounded somehow to minutes

	AirDate *time.Time // The original release date, if available.
	Rating  Rating     // Episode-specific ratings.

	Titles UniqueTitleMap // Map with a title for each language

	Cached time.Time // When the data was retrieved from the server
}

// Episodes is a list of Episode values.
type Episodes []Episode
--- /dev/null
+package anidb
+
+import (
+ "encoding/gob"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+)
+
// Register the concrete type so *Episode values survive the gob
// round-trip used by DumpCaches/LoadCaches.
func init() {
	gob.RegisterName("*github.com/Kovensky/go-anidb.Episode", &Episode{})
}

// Touch updates the last-retrieved timestamp (Cacheable interface).
func (e *Episode) Touch() {
	e.Cached = time.Now()
}
+
+func (e *Episode) IsStale() bool {
+ if e == nil {
+ return true
+ }
+ return time.Now().Sub(e.Cached) > EpisodeCacheDuration
+}
+
// eidAidMap remembers which Anime owns each Episode, letting
// EpisodeByID fetch the full Anime for a bare EID.
var eidAidMap = map[EID]AID{}
var eidAidLock = sync.RWMutex{}

// Unique Episode IDentifier.
type EID int

// Retrieves the Episode corresponding to this EID from the cache.
// Returns nil on a cache miss.
func (eid EID) Episode() *Episode {
	e, _ := caches.Get(episodeCache).Get(int(eid)).(*Episode)
	return e
}

// cacheEpisode stores the episode in the cache and records its
// EID -> AID ownership.
func cacheEpisode(ep *Episode) {
	eidAidLock.Lock()
	defer eidAidLock.Unlock()

	eidAidMap[ep.EID] = ep.AID
	caches.Get(episodeCache).Set(int(ep.EID), ep)
}
+
// Retrieves the Episode from the cache if possible.
//
// If the result is stale, then queries the UDP API to
// know which AID owns this EID, then gets the episodes
// from the Anime.
//
// The returned channel receives a single *Episode (possibly nil) and
// is then closed.
func (adb *AniDB) EpisodeByID(eid EID) <-chan *Episode {
	ch := make(chan *Episode, 1)

	// Fast path: fresh cache hit.
	if e := eid.Episode(); e != nil && !e.IsStale() {
		ch <- e
		close(ch)
		return ch
	}

	// Register intent; if another goroutine is already fetching this
	// EID, wait for its broadcast instead of querying again.
	ec := caches.Get(episodeCache)
	ic := make(chan Cacheable, 1)
	go func() { ch <- (<-ic).(*Episode); close(ch) }()
	if ec.Intent(int(eid), ic) {
		return ch
	}

	go func() {
		// The UDP API data is worse than the HTTP API anime data,
		// try and get from the corresponding Anime

		eidAidLock.RLock()
		aid, ok := eidAidMap[eid]
		eidAidLock.RUnlock()

		udpDone := false

		// Two attempts: the second one re-resolves the AID via the
		// UDP EPISODE command if the first (mapped) AID didn't pan out.
		var e *Episode
		for i := 0; i < 2; i++ {
			if !ok && udpDone {
				// couldn't get anime and we already ran the EPISODE query
				break
			}

			if !ok {
				// We don't know what the AID is yet.
				reply := <-adb.udp.SendRecv("EPISODE", paramMap{"eid": eid})

				if reply.Error() == nil {
					parts := strings.Split(reply.Lines()[1], "|")

					if id, err := strconv.ParseInt(parts[1], 10, 32); err == nil {
						ok = true
						aid = AID(id)
					}
				} else {
					break
				}
				udpDone = true
			}
			<-adb.AnimeByID(AID(aid)) // this caches episodes...
			e = eid.Episode()         // ...so this is now a cache hit

			if e != nil {
				break
			} else {
				// if this is somehow still a miss, then the EID<->AID map broke
				eidAidLock.Lock()
				delete(eidAidMap, eid)
				eidAidLock.Unlock()

				ok = false
			}
		}
		// Caching (and channel broadcasting) done by AnimeByID
	}()
	return ch
}
--- /dev/null
+package anidb
+
+import (
+ "fmt"
+ "github.com/Kovensky/go-anidb/misc"
+ "image"
+ "time"
+)
+
// FileVersion is a release's revision number (1 for the initial release).
type FileVersion int

// String renders the version AniDB-style: the initial release (v1) is
// implicit and renders as the empty string, later revisions as "v<n>".
func (v FileVersion) String() string {
	switch v {
	case 1:
		return ""
	default:
		return fmt.Sprintf("v%d", int(v))
	}
}
+
// FileSource describes the original media of a release (e.g. DVD, TV).
type FileSource string

// AudioStream describes one audio track of a file.
type AudioStream struct {
	Codec    string
	Bitrate  int // in kbps, per the UDP API field
	Language Language
}

// VideoInfo describes the single video track of a file.
type VideoInfo struct {
	Codec      string
	Bitrate    int
	Resolution image.Rectangle
	ColorDepth int // bits per channel; 8 when the server reports none
}
+
// File is a single released file, linking an Anime, Episode and Group.
type File struct {
	FID FID

	AID AID
	EID EID
	GID GID

	Incomplete bool // set when the server-reported resolution was unusable

	Deprecated bool
	CRCMatch   bool
	BadCRC     bool
	Version    FileVersion
	Uncensored bool // Meaning unclear, may not be simply !Censored
	Censored   bool // Meaning unclear, may not be simply !Uncensored

	Filesize int64
	Ed2kHash string
	SHA1Hash string
	CRC32    string

	Length  time.Duration
	AirDate time.Time

	AudioStreams      []AudioStream
	SubtitleLanguages []Language
	VideoInfo         VideoInfo
	FileExtension     string

	Source FileSource

	OtherEpisodes misc.EpisodeList // other episodes this file also covers

	Cached time.Time // When the data was retrieved from the server
}
--- /dev/null
+package anidb
+
+import (
+ "encoding/gob"
+ "fmt"
+ "github.com/Kovensky/go-anidb/misc"
+ "github.com/Kovensky/go-anidb/udp"
+ "image"
+ "log"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+)
+
// Register the concrete type so *File values survive the gob
// round-trip used by DumpCaches/LoadCaches.
func init() {
	gob.RegisterName("*github.com/Kovensky/go-anidb.File", &File{})
}

// Touch updates the last-retrieved timestamp (Cacheable interface).
func (f *File) Touch() {
	f.Cached = time.Now()
}
+
+func (f *File) IsStale() bool {
+ if f == nil {
+ return true
+ }
+ return time.Now().Sub(f.Cached) > FileCacheDuration
+}
+
// FID is the unique File IDentifier.
type FID int

// File returns the cached File for this FID, or nil on a cache miss.
func (fid FID) File() *File {
	f, _ := caches.Get(fileCache).Get(int(fid)).(*File)
	return f
}
+
// ed2kKey builds the lookup key for an (ed2k hash, file size) pair:
// "<hash>-<zero-padded 16-digit hex size>".
func ed2kKey(ed2k string, size int64) string {
	return ed2k + fmt.Sprintf("-%016x", size)
}
+
// ed2kCache records the (hash, size) -> FID mapping for a file;
// no-op on nil.
func ed2kCache(f *File) {
	if f != nil {
		ed2kFidLock.Lock()
		defer ed2kFidLock.Unlock()
		ed2kFidMap[ed2kKey(f.Ed2kHash, f.Filesize)] = f.FID
	}
}
+
// Prefetches the Anime, Episode and Group that this
// file is linked to using the given AniDB instance.
//
// Returns a channel where this file will be sent to
// when the prefetching is done; if the file is nil,
// the channel will return nil.
//
// NOTE(review): only the Anime and Group are fetched explicitly here;
// presumably the Episode gets cached as a side effect of the Anime
// fetch — confirm against AnimeByID.
func (f *File) Prefetch(adb *AniDB) <-chan *File {
	ch := make(chan *File, 1)
	go func() {
		if f != nil {
			// Start both fetches before waiting on either.
			a := adb.AnimeByID(f.AID)
			g := adb.GroupByID(f.GID)
			<-a
			<-g
			ch <- f
		}
		close(ch)
	}()
	return ch
}
+
// ed2kFidMap resolves (hash, size) keys to FIDs; ed2kIntent holds the
// channels waiting on an in-flight lookup for a key. Both are guarded
// by ed2kFidLock.
var ed2kFidMap = map[string]FID{}
var ed2kIntent = map[string][]chan *File{}
var ed2kFidLock = sync.RWMutex{}
+
// FileByID retrieves a File by FID, preferring the cache and falling
// back to a UDP FILE query.
//
// The returned channel receives a single *File (possibly nil) and is
// then closed.
func (adb *AniDB) FileByID(fid FID) <-chan *File {
	ch := make(chan *File, 1)
	// Fast path: fresh cache hit (IsStale is nil-safe).
	if f := fid.File(); !f.IsStale() {
		ch <- f
		close(ch)
		return ch
	}

	// Register intent; if another goroutine is already querying this
	// FID, wait for its broadcast instead of querying again.
	fc := caches.Get(fileCache)
	ic := make(chan Cacheable, 1)
	go func() { ch <- (<-ic).(*File); close(ch) }()
	if fc.Intent(int(fid), ic) {
		return ch
	}

	go func() {
		reply := <-adb.udp.SendRecv("FILE",
			paramMap{
				"fid":   fid,
				"fmask": fileFmask,
				"amask": fileAmask,
			})

		var f *File
		if reply.Error() == nil {
			f = parseFileResponse(reply)
		}
		ed2kCache(f)
		// Set also broadcasts the result to the intent channels.
		fc.Set(int(fid), f)
	}()
	return ch
}
+
+func (adb *AniDB) FileByEd2kSize(ed2k string, size int64) <-chan *File {
+ key := ed2kKey(ed2k, size)
+ ch := make(chan *File, 1)
+
+ ed2kFidLock.RLock()
+ if fid, ok := ed2kFidMap[key]; ok {
+ ed2kFidLock.RUnlock()
+ if f := fid.File(); f != nil {
+ ch <- f
+ close(ch)
+ return ch
+ }
+ return adb.FileByID(fid)
+ }
+ ed2kFidLock.RUnlock()
+
+ ed2kFidLock.Lock()
+ if list, ok := ed2kIntent[key]; ok {
+ ed2kIntent[key] = append(list, ch)
+ return ch
+ } else {
+ ed2kIntent[key] = append(list, ch)
+ }
+
+ go func() {
+ reply := <-adb.udp.SendRecv("FILE",
+ paramMap{
+ "ed2k": ed2k,
+ "size": size,
+ "fmask": fileFmask,
+ "amask": fileAmask,
+ })
+
+ var f *File
+ if reply.Error() == nil {
+ f = parseFileResponse(reply)
+
+ ed2kCache(f)
+ caches.Get(fileCache).Set(int(f.FID), f)
+ } else if reply.Code() == 320 { // file not found
+ ed2kFidLock.Lock()
+ delete(ed2kFidMap, key)
+ ed2kFidLock.Unlock()
+ } else if reply.Code() == 322 { // multiple files found
+ panic("Don't know what to do with " + strings.Join(reply.Lines(), "\n"))
+ }
+
+ ed2kFidLock.Lock()
+ defer ed2kFidLock.Unlock()
+
+ for _, ch := range ed2kIntent[key] {
+ ch <- f
+ close(ch)
+ }
+ delete(ed2kIntent, key)
+ }()
+ return ch
+}
+
// Field selection masks for the UDP FILE command; parseFileResponse's
// positional indices depend on these exact values.
var fileFmask = "77da7fe8"
var fileAmask = "00008000"

// Bit flags of the FILE reply's "state" field.
const (
	fileStateCRCOK = 1 << iota
	fileStateCRCERR
	fileStateV2
	fileStateV3
	fileStateV4
	fileStateV5
	fileStateUncensored
	fileStateCensored
)
+
// sanitizeCodec normalizes the verbose codec names reported by AniDB
// into their common short forms; unknown names pass through unchanged.
func sanitizeCodec(codec string) string {
	replacements := map[string]string{
		"MP3 CBR":          "MP3",
		"WMV9 (also WMV3)": "WMV9",
		"Ogg (Vorbis)":     "Vorbis",
		"H264/AVC":         "H.264",
	}
	if clean, ok := replacements[codec]; ok {
		return clean
	}
	return codec
}
+
+func parseFileResponse(reply udpapi.APIReply) *File {
+ if reply.Error() != nil {
+ return nil
+ }
+ if reply.Truncated() {
+ panic("Truncated")
+ }
+
+ parts := strings.Split(reply.Lines()[1], "|")
+ ints := make([]int64, len(parts))
+ for i, p := range parts {
+ ints[i], _ = strconv.ParseInt(parts[i], 10, 64)
+ log.Printf("#%d: %s\n", i, p)
+ }
+
+ // how does epno look like?
+ log.Println("epno: " + parts[23])
+
+ version := FileVersion(1)
+ switch i := ints[6]; {
+ case i&fileStateV5 != 0:
+ version = 5
+ case i&fileStateV4 != 0:
+ version = 4
+ case i&fileStateV3 != 0:
+ version = 3
+ case i&fileStateV2 != 0:
+ version = 2
+ }
+
+ // codecs (parts[13]), bitrates (ints[14]), langs (parts[19])
+ codecs := strings.Split(parts[13], "'")
+ bitrates := strings.Split(parts[14], "'")
+ alangs := strings.Split(parts[19], "'")
+ streams := make([]AudioStream, len(codecs))
+ for i := range streams {
+ br, _ := strconv.ParseInt(bitrates[i], 10, 32)
+ streams[i] = AudioStream{
+ Bitrate: int(br),
+ Codec: sanitizeCodec(codecs[i]),
+ Language: Language(alangs[i]),
+ }
+ }
+
+ sl := strings.Split(parts[20], "'")
+ slangs := make([]Language, len(sl))
+ for i := range sl {
+ slangs[i] = Language(sl[i])
+ }
+
+ depth := int(ints[11])
+ if depth == 0 {
+ depth = 8
+ }
+ res := strings.Split(parts[17], "x")
+ width, _ := strconv.ParseInt(res[0], 10, 32)
+ height, _ := strconv.ParseInt(res[1], 10, 32)
+ video := VideoInfo{
+ Bitrate: int(ints[16]),
+ Codec: sanitizeCodec(parts[15]),
+ ColorDepth: depth,
+ Resolution: image.Rect(0, 0, int(width), int(height)),
+ }
+
+ return &File{
+ FID: FID(ints[0]),
+
+ AID: AID(ints[1]),
+ EID: EID(ints[2]),
+ GID: GID(ints[3]),
+
+ OtherEpisodes: misc.ParseEpisodeList(parts[4]).Simplify(),
+ Deprecated: ints[5] != 0,
+
+ CRCMatch: ints[6]&fileStateCRCOK != 0,
+ BadCRC: ints[6]&fileStateCRCERR != 0,
+ Version: version,
+ Uncensored: ints[6]&fileStateUncensored != 0,
+ Censored: ints[6]&fileStateCensored != 0,
+
+ Incomplete: video.Resolution.Empty(),
+
+ Filesize: ints[7],
+ Ed2kHash: parts[8],
+ SHA1Hash: parts[9],
+ CRC32: parts[10],
+
+ Source: FileSource(parts[12]),
+
+ AudioStreams: streams,
+ SubtitleLanguages: slangs,
+ VideoInfo: video,
+ FileExtension: parts[18],
+
+ Length: time.Duration(ints[21]) * time.Second,
+ AirDate: time.Unix(ints[22], 0),
+ }
+}
--- /dev/null
+package anidb
+
+import (
+ "encoding/json"
+ "strconv"
+ "time"
+)
+
// See the constants list for valid values.
type GroupRelationType int

const (
	GroupParticipantIn = GroupRelationType(1 + iota)
	GroupParentOf
	_
	GroupMergedFrom
	GroupNowKnownAs
	GroupOther

	GroupChildOf = GroupRelationType(102)
)

// Human-readable names for each known relation type.
var groupRelationNames = map[GroupRelationType]string{
	GroupParticipantIn: "Participated In",
	GroupParentOf:      "Parent Of",
	GroupMergedFrom:    "Merged From",
	GroupNowKnownAs:    "Now Known As",
	GroupOther:         "Other",
	GroupChildOf:       "Child Of",
}

// Returns a human-readable description of the relation type, or
// "Unknown" for values with no defined name.
func (gr GroupRelationType) String() string {
	if name, ok := groupRelationNames[gr]; ok {
		return name
	}
	return "Unknown"
}
+
+type GroupRelations map[GID]GroupRelationType
+
+func (gr GroupRelations) MarshalJSON() ([]byte, error) {
+ generic := make(map[string]int, len(gr))
+ for k, v := range gr {
+ generic[strconv.Itoa(int(k))] = int(v)
+ }
+ return json.Marshal(generic)
+}
+
+func (gr GroupRelations) UnmarshalJSON(b []byte) error {
+ var generic map[string]int
+ if err := json.Unmarshal(b, &generic); err != nil {
+ return err
+ }
+ for k, v := range generic {
+ ik, err := strconv.ParseInt(k, 10, 32)
+ if err != nil {
+ return err
+ }
+
+ gr[GID(ik)] = GroupRelationType(v)
+ }
+
+ return nil
+}
+
// Data about a release group, as returned by the UDP API's GROUP command.
type Group struct {
	GID GID // The Group ID.

	Name      string // Full name
	ShortName string // Abbreviated name

	IRC     string // irc: schema format
	URL     string // Website URL
	Picture string // URL of the group's picture, if any

	Founded   time.Time // Zero value if unknown
	Disbanded time.Time // Zero value if unknown or still active

	LastRelease  time.Time // Time of the group's most recent release; zero if unknown
	LastActivity time.Time // Time of the group's most recent activity; zero if unknown

	Rating     Rating // Aggregate rating and vote count for the group
	AnimeCount int    // Number of anime this group has worked on
	FileCount  int    // Number of files this group has released

	RelatedGroups GroupRelations // Relationships with other groups, keyed by their GID

	Cached time.Time // When this struct was last fetched from the API
}
--- /dev/null
+package anidb
+
+import (
+ "encoding/gob"
+ "github.com/Kovensky/go-anidb/http"
+ "strconv"
+ "strings"
+ "time"
+)
+
// Registers the Group pointer type with gob under a stable name so
// cached groups can be encoded/decoded by the cache layer.
func init() {
	gob.RegisterName("*github.com/Kovensky/go-anidb.Group", &Group{})
}
+
// Marks the Group data as fresh by setting the Cached timestamp to now.
func (g *Group) Touch() {
	g.Cached = time.Now()
}
+
+func (g *Group) IsStale() bool {
+ if g == nil {
+ return true
+ }
+ return time.Now().Sub(g.Cached) > GroupCacheDuration
+}
+
// Unique Group IDentifier.
type GID int

// Retrieves the Group from the cache. Returns nil on a cache miss or if
// the cached entry is not a *Group (the type assertion's error is
// deliberately discarded).
func (gid GID) Group() *Group {
	g, _ := caches.Get(groupCache).Get(int(gid)).(*Group)
	return g
}
+
+// Returns a Group from the cache if possible.
+//
+// If the Group is stale, then retrieves the Group
+// through the UDP API.
+func (adb *AniDB) GroupByID(gid GID) <-chan *Group {
+ ch := make(chan *Group, 1)
+ if g := gid.Group(); !g.IsStale() {
+ ch <- g
+ close(ch)
+ return ch
+ }
+
+ gc := caches.Get(groupCache)
+
+ ic := make(chan Cacheable, 1)
+ go func() { ch <- (<-ic).(*Group); close(ch) }()
+
+ if gc.Intent(int(gid), ic) {
+ return ch
+ }
+
+ go func() {
+ reply := <-adb.udp.SendRecv("GROUP",
+ paramMap{"gid": gid})
+
+ var g *Group
+ if reply.Error() == nil {
+ parts := strings.Split(reply.Lines()[1], "|")
+ ints := make([]int64, len(parts))
+ for i := range parts {
+ ints[i], _ = strconv.ParseInt(parts[i], 10, 32)
+ }
+
+ irc := ""
+ if parts[7] != "" {
+ irc = "irc://" + parts[8] + "/" + parts[7][1:]
+ }
+
+ pic := ""
+ if parts[10] != "" {
+ pic = httpapi.AniDBImageBaseURL + parts[10]
+ }
+
+ rellist := strings.Split(parts[16], "'")
+ relations := make(map[GID]GroupRelationType, len(rellist))
+ for _, rel := range rellist {
+ r := strings.Split(rel, ",")
+ gid, _ := strconv.ParseInt(r[0], 10, 32)
+ typ, _ := strconv.ParseInt(r[1], 10, 32)
+
+ relations[GID(gid)] = GroupRelationType(typ)
+ }
+
+ ft := time.Unix(ints[11], 0)
+ if ints[11] == 0 {
+ ft = time.Time{}
+ }
+ dt := time.Unix(ints[12], 0)
+ if ints[12] == 0 {
+ dt = time.Time{}
+ }
+ lr := time.Unix(ints[14], 0)
+ if ints[14] == 0 {
+ lr = time.Time{}
+ }
+ la := time.Unix(ints[15], 0)
+ if ints[15] == 0 {
+ la = time.Time{}
+ }
+
+ g = &Group{
+ GID: GID(ints[0]),
+
+ Name: parts[5],
+ ShortName: parts[6],
+
+ IRC: irc,
+ URL: parts[9],
+ Picture: pic,
+
+ Founded: ft,
+ Disbanded: dt,
+ // ignore ints[13]
+ LastRelease: lr,
+ LastActivity: la,
+
+ Rating: Rating{
+ Rating: float32(ints[1]) / 100,
+ VoteCount: int(ints[2]),
+ },
+ AnimeCount: int(ints[3]),
+ FileCount: int(ints[4]),
+
+ RelatedGroups: relations,
+
+ Cached: time.Now(),
+ }
+ }
+ gc.Set(int(gid), g)
+ }()
+ return ch
+}
--- /dev/null
+package anidb
+
+import (
+ "time"
+)
+
// A language code as used by AniDB.
// NOTE(review): presumably ISO 639-style codes ("ja", "en") — confirm
// against the AniDB API documentation.
type Language string

var (
	// Default durations for the various caches.
	// Used by the IsStale methods.
	DefaultCacheDuration = 7 * 24 * time.Hour
	AnimeCacheDuration = DefaultCacheDuration
	EpisodeCacheDuration = DefaultCacheDuration
	GroupCacheDuration = 4 * DefaultCacheDuration // They don't change that often.
	FileCacheDuration = 8 * DefaultCacheDuration // These change even less often.

	// Used when there's some data missing on a file.
	// Usually happens because the AVDump data hasn't been merged with the database
	// yet, which is done on a daily cron job.
	FileIncompleteCacheDuration = 24 * time.Hour
)
--- /dev/null
+package anidb
+
import (
	"bytes"
	"errors"
	"io"
	"net/http"
	"sync"
	"time"

	"github.com/Kovensky/go-anidb/titles"
)
+
// Raw contents of the anime-titles dump, kept so SaveCurrentTitles /
// DumpCaches can write it back out.
var titlesFileData []byte
// Guards titlesFileData.
var titlesFileDataLock sync.Mutex
// Parsed titles database backing the Search* functions.
var titlesDB = &titles.TitlesDatabase{}
+
+// Loads the anime-titles database from the given io.Reader.
+//
+// Caches the io.Reader's contents on memory, which gets saved
+// by DumpCaches.
+func LoadTitles(src io.Reader) error {
+ buf := bytes.Buffer{}
+ _, err := io.Copy(&buf, src)
+ if err != nil && err != io.EOF {
+ return err
+ }
+
+ titlesFileDataLock.Lock()
+ defer titlesFileDataLock.Unlock()
+
+ titlesFileData = buf.Bytes()
+
+ titlesDB.LoadDB(bytes.NewReader(titlesFileData))
+
+ return nil
+}
+
+// Saves the currently cached anime-titles database to the given io.Writer.
+func (adb *AniDB) SaveCurrentTitles(dst io.Writer) (int64, error) {
+ return io.Copy(dst, bytes.NewReader(titlesFileData))
+}
+
+// Returns true if the titles database is up-to-date (newer than 24 hours).
+func TitlesUpToDate() (ok bool) {
+ return time.Now().Sub(titlesDB.UpdateTime) < 24*time.Hour
+}
+
+// Downloads a new anime-titles database if the database is outdated.
+//
+// Caches the contents on memory, which gets saved by DumpCaches.
+func UpdateTitles() error {
+ // too new, no need to update
+ if TitlesUpToDate() {
+ return nil
+ }
+
+ resp, err := http.Get(titles.DataDumpURL)
+ if err != nil {
+ return err
+ }
+ defer resp.Body.Close()
+
+ return LoadTitles(resp.Body)
+}
--- /dev/null
+package anidb
+
+import (
+ "github.com/Kovensky/go-anidb/titles"
+)
+
+// Searches for the given anime name, case sensitive.
+//
+// Returns the match with the smallest AID.
+func SearchAnime(name string) AID {
+ rs := SearchAnimeAll(name).ResultsByAID()
+ if len(rs) == 0 {
+ return 0
+ }
+ return AID(rs[0].AID)
+}
+
// Searches for all anime that match the given anime name, case sensitive.
// Delegates to the in-memory titles database's fuzzy search; see
// titles.TitlesDatabase.FuzzySearch for the exact matching semantics.
func SearchAnimeAll(name string) titles.ResultSet {
	return titlesDB.FuzzySearch(name)
}
+
+// Searches for the given anime name, case folding.
+//
+// Returns the match with the smallest AID.
+func SearchAnimeFold(name string) AID {
+ rs := SearchAnimeFoldAll(name).ResultsByAID()
+ if len(rs) == 0 {
+ return 0
+ }
+ return AID(rs[0].AID)
+}
+
// Searches for all anime that match the given anime name, case folding.
// Delegates to the in-memory titles database's fuzzy search; see
// titles.TitlesDatabase.FuzzySearchFold for the exact matching semantics.
func SearchAnimeFoldAll(name string) titles.ResultSet {
	return titlesDB.FuzzySearchFold(name)
}
--- /dev/null
+package anidb
+
+import (
+ "github.com/Kovensky/go-anidb/udp"
+ "sync"
+ "time"
+)
+
+var banTime time.Time
+var banTimeLock sync.Mutex
+
+const banDuration = 30*time.Minute + 1*time.Second
+
+// Returns whether the last UDP API access returned a 555 BANNED message.
+func Banned() bool {
+ banTimeLock.Lock()
+ banTimeLock.Unlock()
+
+ return _banned()
+}
+
+func _banned() bool {
+ return time.Now().Sub(banTime) > banDuration
+}
+
// A queued UDP API request: the command, its parameters, and the channel
// that receives the single reply.
type paramSet struct {
	cmd string
	params paramMap
	ch chan udpapi.APIReply
}
+
// Wraps the low-level UDP API client with a serialized send queue that
// handles retries, re-authentication and ban tracking.
type udpWrap struct {
	*udpapi.AniDBUDP

	sendQueueCh chan paramSet // work queue drained by the sendQueue goroutine

	// NOTE(review): not referenced in this chunk; presumably used by the
	// authentication code elsewhere in the package — confirm.
	credentials *credentials
	connected bool
}
+
// Creates a udpWrap and starts its queue-processing goroutine.
func newUDPWrap() *udpWrap {
	u := &udpWrap{
		AniDBUDP: udpapi.NewAniDBUDP(),
		sendQueueCh: make(chan paramSet, 10),
	}
	go u.sendQueue()
	return u
}
+
type paramMap udpapi.ParamMap // shortcut for the UDP API's parameter map type

// A canned reply delivered without touching the network while a ban is
// in effect. The embedded APIReply is nil, so only the methods
// overridden below are safe to call on it.
type bannedAPIReply struct {
	udpapi.APIReply
}

// Mirrors the UDP API's "555 BANNED" status code.
func (r *bannedAPIReply) Code() int {
	return 555
}
func (r *bannedAPIReply) Text() string {
	return "BANNED"
}
func (r *bannedAPIReply) Error() error {
	return &udpapi.APIError{Code: 555, Desc: "BANNED"}
}

// Shared instance; the type carries no mutable state.
var bannedReply udpapi.APIReply = &bannedAPIReply{}
+
// Drains the send queue serially: forwards each request to the UDP API,
// transparently retrying on timeout or expired session, and delivers the
// final reply to the request's channel.
func (udp *udpWrap) sendQueue() {
	for set := range udp.sendQueueCh {
		reply := <-udp.AniDBUDP.SendRecv(set.cmd, udpapi.ParamMap(set.params))

		if reply.Error() == udpapi.TimeoutError {
			// retry; re-enqueued from a fresh goroutine so this loop
			// can't deadlock on its own (bounded) queue
			go func(set paramSet) { udp.sendQueueCh <- set }(set)
			continue
		}

		switch reply.Code() {
		case 403, 501, 506: // not logged in, or session expired
			if err := udp.ReAuth(); err == nil {
				// retry
				go func(set paramSet) { udp.sendQueueCh <- set }(set)
				continue
			}
			// ReAuth failed: fall through and deliver the error reply.
		case 503, 504: // client library rejected
			panic(reply.Error())
		case 555: // IP (and user, possibly client) temporarily banned
			// Record the ban start; SendRecv short-circuits requests
			// until banDuration has elapsed.
			banTimeLock.Lock()

			banTime = time.Now()

			banTimeLock.Unlock()
		}
		set.ch <- reply
		close(set.ch)
	}
}
+
// Queues a command for sending and returns a 1-buffered channel that
// will receive the single reply.
//
// While a recorded ban is still in effect, immediately delivers a canned
// 555 BANNED reply instead of queueing; once the ban has expired, the
// recorded ban time is cleared and the request proceeds.
func (udp *udpWrap) SendRecv(cmd string, params paramMap) <-chan udpapi.APIReply {
	ch := make(chan udpapi.APIReply, 1)

	banTimeLock.Lock()
	defer banTimeLock.Unlock()
	// Despite its name, _banned() reports that the ban has EXPIRED
	// (elapsed > banDuration), so this branch means "safe to send".
	if _banned() {
		banTime = time.Time{}
	} else {
		ch <- bannedReply
		close(ch)
		return ch
	}

	// Note: the lock is still held here; a full sendQueueCh briefly
	// blocks other senders too.
	udp.sendQueueCh <- paramSet{
		cmd: cmd,
		params: params,
		ch: ch,
	}

	return ch
}