
Commit 73fdf87

Include next_page and prev_page metadata on search
As discussed in the May 30th team meeting (https://discordapp.com/channels/442252698964721669/448525639469891595/583749739166695444), this is the first step in deprecating/removing numbered pagination for the "all crates" endpoint. The remaining steps are:

- Open issues/PRs on bots which have set a user agent that lets us identify them, and have them start following this link
- Change our frontend to show "next/prev page" links on the all crates page
- Stop returning the "total" meta item when the next/prev page links will be cursor based
- Switch the links over to cursor based pagination when applicable
- Error when given pagination parameters that would cause very high offsets

Since the problem we're addressing only occurs when the offset is greater than ~18k, the only place we currently need to worry about this is "all crates". There isn't currently any filter you can apply that will return enough rows for this to be a problem. `?letter=` is the most likely, but it is probably the least useful page/endpoint we have, and it might just be worth removing it. Either way, AFAIK it isn't hit by crawlers, and humans aren't visiting page 900 in their browsers, so even if we do end up with enough crates for the offset to possibly be too high, it's unlikely anyone will ever hit it there.

I'm not entirely sure how I want to structure the final logic for "give me the next/previous page", so I've just stuck it at the bottom of the search function for now. We'll want something more structured in the long term, but I'd like to see what this looks like with the cursor based option before pulling it out into a more general abstraction.
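For reference, the response shape this commit adds, shown for a hypothetical request for the middle of three pages with one result per page (the endpoint path is illustrative; the link values mirror the assertions in the test added below):

    GET /api/v1/crates?page=2&per_page=1

    {
      "crates": [ ... ],
      "meta": {
        "total": 3,
        "next_page": "?page=3&per_page=1",
        "prev_page": "?page=1&per_page=1"
      }
    }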
1 parent 293a3fb commit 73fdf87

File tree: 6 files changed (+59, −3)


Cargo.lock (+1): generated file, diff not rendered.

Cargo.toml (+1)

@@ -82,6 +82,7 @@ futures = "0.1"
 tokio = "0.1"
 hyper = "0.12"
 ctrlc = { version = "3.0", features = ["termination"] }
+indexmap = "1.0.2"

 [dev-dependencies]
 conduit-test = "0.8"

src/controllers.rs (+3, −2)

@@ -15,13 +15,14 @@ mod prelude {
     use std::io;

     use serde::Serialize;
+    use indexmap::IndexMap;
     use url;

     pub trait RequestUtils {
         fn redirect(&self, url: String) -> Response;

         fn json<T: Serialize>(&self, t: &T) -> Response;
-        fn query(&self) -> HashMap<String, String>;
+        fn query(&self) -> IndexMap<String, String>;
         fn wants_json(&self) -> bool;
         fn pagination(&self, default: usize, max: usize) -> CargoResult<(i64, i64)>;
     }
@@ -31,7 +32,7 @@ mod prelude {
             crate::util::json_response(t)
         }

-        fn query(&self) -> HashMap<String, String> {
+        fn query(&self) -> IndexMap<String, String> {
             url::form_urlencoded::parse(self.query_string().unwrap_or("").as_bytes())
                 .map(|(a, b)| (a.into_owned(), b.into_owned()))
                 .collect()
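The commit message doesn't spell out why query() moves from HashMap to IndexMap, but the likely reason is that IndexMap preserves insertion order, so re-serializing the parsed query string (as the url_for_page closure below does) keeps the caller's parameters in their original order rather than shuffling them. A minimal sketch of that property, assuming indexmap 1.0:

    use indexmap::IndexMap;

    fn main() {
        let mut params: IndexMap<String, String> = IndexMap::new();
        params.insert("per_page".into(), "1".into());
        params.insert("page".into(), "1".into());
        // Overwriting an existing key keeps its original position,
        // so a rebuilt link preserves the user's parameter order.
        params.insert("page".into(), "2".into());

        let keys: Vec<&str> = params.keys().map(String::as_str).collect();
        assert_eq!(keys, ["per_page", "page"]);
    }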

src/controllers/krate/search.rs (+26, −1)

@@ -222,6 +222,29 @@ pub fn search(req: &mut dyn Request) -> CargoResult<Response> {
         )
         .collect();

+    let mut next_page = None;
+    let mut prev_page = None;
+    let page_num = params.get("page")
+        .map(|s| s.parse())
+        .unwrap_or(Ok(1))?;
+
+    let url_for_page = |num: i64| {
+        let mut params = req.query();
+        params.insert("page".into(), num.to_string());
+        let query_string = url::form_urlencoded::Serializer::new(String::new())
+            .clear()
+            .extend_pairs(params)
+            .finish();
+        Some(format!("?{}", query_string))
+    };
+
+    if offset + limit < total {
+        next_page = url_for_page(page_num + 1);
+    }
+    if page_num != 1 {
+        prev_page = url_for_page(page_num - 1);
+    };
+
     #[derive(Serialize)]
     struct R {
         crates: Vec<EncodableCrate>,
@@ -230,10 +253,12 @@ pub fn search(req: &mut dyn Request) -> CargoResult<Response> {
     #[derive(Serialize)]
     struct Meta {
         total: i64,
+        next_page: Option<String>,
+        prev_page: Option<String>,
     }

     Ok(req.json(&R {
         crates,
-        meta: Meta { total },
+        meta: Meta { total, next_page, prev_page },
     }))
 }
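For anyone unfamiliar with the url crate's serializer, here is a standalone sketch of what url_for_page computes; the parameter values are hypothetical, and the result matches the links asserted in the test added below:

    use url::form_urlencoded;

    fn main() {
        // Existing query pairs pass through; "page" has already been
        // replaced with the target page number before serializing.
        let params = vec![
            ("per_page".to_string(), "1".to_string()),
            ("page".to_string(), "2".to_string()),
        ];
        let query_string = form_urlencoded::Serializer::new(String::new())
            .extend_pairs(params)
            .finish();
        assert_eq!(format!("?{}", query_string), "?per_page=1&page=2");
    }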

src/tests/all.rs (+2)

@@ -93,6 +93,8 @@ pub struct CrateList {
 #[derive(Deserialize)]
 struct CrateMeta {
     total: i32,
+    next_page: Option<String>,
+    prev_page: Option<String>,
 }
 #[derive(Deserialize)]
 pub struct CrateResponse {

src/tests/krate.rs (+26)

@@ -2155,3 +2155,29 @@ fn publish_rate_limit_doesnt_affect_existing_crates() {
     token.enqueue_publish(new_version).good();
     app.run_pending_background_jobs();
 }
+
+#[test]
+fn pagination_links_included_if_applicable() {
+    let (app, anon, user) = TestApp::init().with_user();
+    let user = user.as_model();
+
+    app.db(|conn| {
+        CrateBuilder::new("pagination_links_1", user.id)
+            .expect_build(conn);
+        CrateBuilder::new("pagination_links_2", user.id)
+            .expect_build(conn);
+        CrateBuilder::new("pagination_links_3", user.id)
+            .expect_build(conn);
+    });
+
+    let page1 = anon.search("per_page=1");
+    let page2 = anon.search("page=2&per_page=1");
+    let page3 = anon.search("page=3&per_page=1");
+
+    assert_eq!(Some("?per_page=1&page=2".to_string()), page1.meta.next_page);
+    assert_eq!(None, page1.meta.prev_page);
+    assert_eq!(Some("?page=3&per_page=1".to_string()), page2.meta.next_page);
+    assert_eq!(Some("?page=1&per_page=1".to_string()), page2.meta.prev_page);
+    assert_eq!(None, page3.meta.next_page);
+    assert_eq!(Some("?page=2&per_page=1".to_string()), page3.meta.prev_page);
+}
