OK OK my test worked locally it seems, but not via drone. No idea why – at this point I don’t much care as long as it works for me. Now for the code changes to encode and decode tweets, efficiency be damned – let’s just move on.
https://dev.twitter.com/docs/working-with-timelines
Tweets are returned in reverse chronological order – newest first – so not ideal for writing straight out to a file.
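Because the newest tweet comes back first, dumping the slice straight into a file leaves the file newest-first as well. Here's a minimal sketch of flipping a page into oldest-first order before writing it – not something the collect function that follows bothers with, given the "efficiency be damned" rule:

// reverseTweets flips a page of tweets in place so the oldest comes first,
// which is the order you would want when appending to a file.
func reverseTweets(tweets []anaconda.Tweet) {
	for i, j := 0, len(tweets)-1; i < j; i, j = i+1, j-1 {
		tweets[i], tweets[j] = tweets[j], tweets[i]
	}
}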
func (tw *twitter) collect(toGet string) (string, error) {
	var searchResult []anaconda.Tweet
	var err error
	var file *os.File
	v := url.Values{}
	v.Set("count", "20")
	fileName := strings.Join([]string{toGet, "-", strconv.Itoa(time.Now().Day()), ".json"}, "")
	file, err = os.OpenFile(fileName, os.O_RDWR, 0666)
	if err != nil {
		if file, err = os.Create(fileName); err != nil {
			err = errors.New(co.Lang("could neither open nor create the file to save tweets to"))
		}
	} else {
		/* read the first tweet to get the ID, then use v.Set to add this to the url query
		based on https://dev.twitter.com/docs/working-with-timelines
		max_id is to be set on subsequent tweet requests with the lowest id present.
		Subtract 1 from the lowest Tweet ID returned from the previous request and use this for the value of max_id.
		It does not matter if this adjusted max_id is a valid Tweet ID, or if it corresponds with a Tweet posted by a different user -
		the value is just used to decide which Tweets to filter. When adjusted in this manner, it is possible to page through a
		timeline without receiving redundant Tweets.
		Technically for my work it doesn't matter if some tweets go into an incorrect date file - it is not that important and will be
		addressed at analysis time.
		*/
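		// The quote above describes max_id (paging back through older tweets); the loop
		// below instead keeps the highest ID already saved and sends it as since_id,
		// so only tweets newer than what is on disk come back.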
		dec := json.NewDecoder(file)
		var tweet anaconda.Tweet
		var sinceID int64
		for {
			if err = dec.Decode(&tweet); err == io.EOF { //There must be a more efficient way of getting a tweet than this!
				err = nil // clean end of file, not a failure
				break
			} else if err != nil {
				err = errors.New(co.Lang(strings.Join([]string{"problem decoding tweet from json file", fileName}, " ")))
				break
			}
			if sinceID < tweet.Id {
				sinceID = tweet.Id
			}
		}
		v.Set("since_id", strconv.FormatInt(sinceID, 10))
	}
	if err == nil {
		anaconda.SetConsumerKey(tw.userKey)
		anaconda.SetConsumerSecret(tw.userSecret)
		api := anaconda.NewTwitterApi(tw.apiKey, tw.apiSecret)
		switch toGet[0] {
		case '@':
			v.Set("screen_name", toGet[1:])
			searchResult, err = api.GetUserTimeline(v) //Names Feed
		case '#':
			searchResult, err = api.GetSearch(toGet[1:], v) //Tag Feed
		default:
			searchResult, err = api.GetHomeTimeline(v) //Home Feed
		}
		if err != nil {
			err = errors.New(co.Lang("could not connect to twitter"))
		} else {
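			// Rewind and write the new batch from the start of the file; the file is
			// not truncated first, so stale bytes can be left behind if the batch is short.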
			file.Seek(0, 0)
			enc := json.NewEncoder(file)
			for i, tweet := range searchResult {
				if err = enc.Encode(&tweet); err != nil {
					err = errors.New(co.Lang(strings.Join([]string{"problem encoding tweet", strconv.Itoa(i), "to json file", fileName}, " ")))
					break
				}
				file.Sync()
			}
		}
	}
	file.Close()
	return v.Get("since_id"), err
}
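For context, here's a rough sketch of how collect might be driven from the same package. The twitter struct itself isn't shown here, so the literal below just assumes it holds the four credential strings the method uses, and log is assumed to be imported:

// Hypothetical caller in the same package as collect.
func collectTag(tag string) {
	tw := &twitter{
		userKey:    "consumer-key",
		userSecret: "consumer-secret",
		apiKey:     "access-token",
		apiSecret:  "access-token-secret",
	}
	// "#tag" hits search, "@name" hits that user's timeline, and anything else
	// falls through to the home timeline.
	lastID, err := tw.collect(tag)
	if err != nil {
		log.Println(err)
		return
	}
	log.Println("since_id recorded for the next run:", lastID)
}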