using Newtonsoft.Json.Linq;
using System;
using System.Collections.Generic;
using RedditSharp.Things;

namespace RedditSharp
{
    /// <summary>
    /// A paginated Reddit listing of things of type <typeparamref name="T"/>.
    /// Pages are fetched lazily from the Reddit API as the listing is enumerated.
    /// </summary>
    /// <typeparam name="T">The type of <see cref="Thing"/> contained in the listing.</typeparam>
    public class Listing<T> : IEnumerable<T> where T : Thing
    {
        /// <summary>
        /// Gets the default number of listings returned per request
        /// </summary>
        internal const int DefaultListingPerRequest = 25;

        private IWebAgent WebAgent { get; set; }
        private Reddit Reddit { get; set; }
        private string Url { get; set; }

        /// <summary>
        /// Creates a new Listing instance
        /// </summary>
        /// <param name="reddit">The Reddit instance used to parse returned things.</param>
        /// <param name="url">The listing endpoint URL (may already contain a query string).</param>
        /// <param name="webAgent">The web agent used to perform HTTP requests.</param>
        internal Listing(Reddit reddit, string url, IWebAgent webAgent)
        {
            WebAgent = webAgent;
            Reddit = reddit;
            Url = url;
        }

        /// <summary>
        /// Returns an enumerator that iterates through a collection, using the specified number of listings per
        /// request and optionally the maximum number of listings
        /// </summary>
        /// <param name="limitPerRequest">The number of listings to be returned per request</param>
        /// <param name="maximumLimit">The maximum number of listings to return; -1 for no maximum</param>
        /// <returns>An enumerator over the listing.</returns>
        public IEnumerator<T> GetEnumerator(int limitPerRequest, int maximumLimit = -1)
        {
            return new ListingEnumerator<T>(this, limitPerRequest, maximumLimit);
        }

        /// <summary>
        /// Returns an enumerator that iterates through a collection, using the default number of listings per request
        /// </summary>
        /// <returns>An enumerator over the listing.</returns>
        public IEnumerator<T> GetEnumerator()
        {
            return GetEnumerator(DefaultListingPerRequest);
        }

        /// <summary>
        /// Returns an enumerator that iterates through a collection
        /// </summary>
        /// <returns>A non-generic enumerator over the listing.</returns>
        System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
        {
            return GetEnumerator();
        }

        /// <summary>
        /// Returns an IEnumerable instance which will return the specified maximum number of listings
        /// </summary>
        /// <param name="maximumLimit">The maximum number of listings to return</param>
        /// <returns>A lazily-evaluated sequence of at most <paramref name="maximumLimit"/> listings.</returns>
        public IEnumerable<T> GetListing(int maximumLimit)
        {
            return GetListing(maximumLimit, DefaultListingPerRequest);
        }

        /// <summary>
        /// Returns an IEnumerable instance which will return the specified maximum number of listings
        /// with the limited number per request
        /// </summary>
        /// <param name="maximumLimit">The maximum number of listings to return</param>
        /// <param name="limitPerRequest">The number of listings to be returned per request</param>
        /// <returns>A lazily-evaluated sequence of at most <paramref name="maximumLimit"/> listings.</returns>
        public IEnumerable<T> GetListing(int maximumLimit, int limitPerRequest)
        {
            // Get the enumerator with the specified maximum and per request limits
            var enumerator = GetEnumerator(limitPerRequest, maximumLimit);
            return GetEnumerator(enumerator);
        }

        /// <summary>
        /// Converts an IEnumerator instance to an IEnumerable
        /// </summary>
        /// <param name="enumerator">The enumerator to wrap.</param>
        /// <returns>A sequence that yields the enumerator's remaining elements.</returns>
        private static IEnumerable<T> GetEnumerator(IEnumerator<T> enumerator)
        {
            while (enumerator.MoveNext())
            {
                yield return enumerator.Current;
            }
        }

#pragma warning disable 0693
        private class ListingEnumerator<T> : IEnumerator<T> where T : Thing
        {
            private Listing<T> Listing { get; set; }
            // Index of the current item within the page; -1 before the first MoveNext.
            private int CurrentPageIndex { get; set; }
            // Pagination cursors returned by the Reddit API.
            private string After { get; set; }
            private string Before { get; set; }
            private Thing[] CurrentPage { get; set; }
            // Total number of items seen so far across all pages.
            private int Count { get; set; }
            private int LimitPerRequest { get; set; }
            private int MaximumLimit { get; set; }

            /// <summary>
            /// Creates a new ListingEnumerator instance
            /// </summary>
            /// <param name="listing">The listing to enumerate.</param>
            /// <param name="limitPerRequest">The number of listings to be returned per request. -1 will exclude this parameter and use the Reddit default (25)</param>
            /// <param name="maximumLimit">The maximum number of listings to return, -1 will not add a limit</param>
            public ListingEnumerator(Listing<T> listing, int limitPerRequest, int maximumLimit)
            {
                Listing = listing;
                CurrentPageIndex = -1;
                CurrentPage = new Thing[0];

                // Set the listings per page (if not specified, use the Reddit default of 25) and the maximum listings
                LimitPerRequest = (limitPerRequest <= 0 ? DefaultListingPerRequest : limitPerRequest);
                MaximumLimit = maximumLimit;
            }

            public T Current
            {
                get
                {
                    return (T)CurrentPage[CurrentPageIndex];
                }
            }

            private void FetchNextPage()
            {
                var url = Listing.Url;

                if (After != null)
                {
                    url += (url.Contains("?") ? "&" : "?") + "after=" + After;
                }

                if (LimitPerRequest != -1)
                {
                    int limit = LimitPerRequest;

                    // BUGFIX: only clamp against MaximumLimit when a maximum was actually
                    // requested. Previously, with MaximumLimit == -1 (no maximum), the
                    // comparison "limit > MaximumLimit" was always true, which set limit
                    // to -1 and silently dropped the limit parameter from the request.
                    if (MaximumLimit != -1)
                    {
                        if (limit > MaximumLimit)
                        {
                            // If the limit is more than the maximum number of listings, adjust
                            limit = MaximumLimit;
                        }
                        else if (Count + limit > MaximumLimit)
                        {
                            // If a smaller subset of listings are needed, adjust the limit
                            limit = MaximumLimit - Count;
                        }
                    }

                    if (limit > 0)
                    {
                        // Add the limit, the maximum number of items to be returned per page
                        url += (url.Contains("?") ? "&" : "?") + "limit=" + limit;
                    }
                }

                if (Count > 0)
                {
                    // Add the count, the number of items already seen in this listing
                    // The Reddit API uses this to determine when to give values for before and after fields
                    url += (url.Contains("?") ? "&" : "?") + "count=" + Count;
                }

                var request = Listing.WebAgent.CreateGet(url);
                var response = request.GetResponse();
                var data = Listing.WebAgent.GetResponseString(response.GetResponseStream());
                var json = JToken.Parse(data);
                if (json["kind"].ValueOrDefault<string>() != "Listing")
                    throw new FormatException("Reddit responded with an object that is not a listing.");
                Parse(json);
            }

            private void Parse(JToken json)
            {
                var children = json["data"]["children"] as JArray;
                CurrentPage = new Thing[children.Count];

                for (int i = 0; i < CurrentPage.Length; i++)
                    CurrentPage[i] = Thing.Parse(Listing.Reddit, children[i], Listing.WebAgent);

                // Increase the total count of items returned
                Count += CurrentPage.Length;

                After = json["data"]["after"].Value<string>();
                Before = json["data"]["before"].Value<string>();
            }

            public void Dispose()
            {
                // Nothing to dispose; required by IEnumerator<T>.
            }

            object System.Collections.IEnumerator.Current
            {
                get { return Current; }
            }

            public bool MoveNext()
            {
                CurrentPageIndex++;
                if (CurrentPageIndex == CurrentPage.Length)
                {
                    if (After == null && CurrentPageIndex != 0)
                    {
                        // No more pages to return
                        return false;
                    }

                    if (MaximumLimit != -1 && Count >= MaximumLimit)
                    {
                        // Maximum listing count returned
                        return false;
                    }

                    // Get the next page
                    FetchNextPage();
                    CurrentPageIndex = 0;

                    if (CurrentPage.Length == 0)
                    {
                        // No listings were returned in the page
                        return false;
                    }
                }
                return true;
            }

            public void Reset()
            {
                After = Before = null;
                CurrentPageIndex = -1;
                CurrentPage = new Thing[0];
            }
        }
#pragma warning restore 0693
    }
}