
Streaming decompression

Merged Jim Wallace requested to merge streaming-decompression into main
9 files changed: +346 -122
@@ -44,8 +44,8 @@ public func downloadSubredditFromServer(subreddit: String, source: String = "htt
     let submissionsURL = source + subreddit + "_submissions.zst"
     let commentsURL = source + subreddit + "_comments.zst"
-    debugPrint("Downloading \(submissionsURL)")
-    debugPrint("Downloading \(commentsURL)")
+    //debugPrint("Downloading \(submissionsURL)")
+    //debugPrint("Downloading \(commentsURL)")
     if let submissionsURL = URL(string: submissionsURL),
        let commentsURL = URL(string: commentsURL) {
@@ -58,24 +58,24 @@ public func downloadSubredditFromServer(subreddit: String, source: String = "htt
         // Once we have submissions data,
         let _ = try await submissionsData
-        debugPrint("Processing submission data...")
+        //debugPrint("Processing submission data...")
         let (submissions, _ ): ([Submission],[Data]) = try await loadFromRedditArchive(submissionsData, verbose: verbose) // TODO: Figure out what to do with error data
         for submission in submissions {
             // Create a new thread for each submission, index by submission ID
             result["t3_\(submission.id!)"] = RedditThread(submission: submission, comments: [Comment]())
         }
-        debugPrint("Completed processing submissions.")
+        //debugPrint("Completed processing submissions.")
         // Then fill in the comments once we have them..
         let _ = try await commentsData
-        debugPrint("Processing comments data...")
+        //debugPrint("Processing comments data...")
         let (comments, _ ): ([Comment],[Data]) = try await loadFromRedditArchive(commentsData, verbose: verbose) // TODO: Figure out what to do with error data
         for comment in comments {
             if result[comment.link_id!] != nil {
                 result[comment.link_id!]!.add(comment)
             }
         }
-        debugPrint("Completed processing comments.")
+        //debugPrint("Completed processing comments.")
     } catch {
         print("Error downloading or loading data: \(error)")