From fcbb483d4a4e177c7dda7305b4cab7a759a298ac Mon Sep 17 00:00:00 2001 From: Honza Javorek Date: Thu, 31 Jul 2025 15:51:33 +0200 Subject: [PATCH] refactor: use for/of and .toArray() when looping over Cheerio selections Also fixes a few bugs I noticed when trying out the code. --- .../06_locating_elements.md | 38 +++++++------- .../07_extracting_data.md | 20 +++---- .../08_saving_data.md | 6 +-- .../09_getting_links.md | 17 +++--- .../10_crawling.md | 20 +++---- .../11_scraping_variants.md | 52 ++++++++----------- .../12_framework.md | 31 +++++------ 7 files changed, 86 insertions(+), 98 deletions(-) diff --git a/sources/academy/webscraping/scraping_basics_javascript2/06_locating_elements.md b/sources/academy/webscraping/scraping_basics_javascript2/06_locating_elements.md index 407ba9510e..9b210d5274 100644 --- a/sources/academy/webscraping/scraping_basics_javascript2/06_locating_elements.md +++ b/sources/academy/webscraping/scraping_basics_javascript2/06_locating_elements.md @@ -23,20 +23,19 @@ const response = await fetch(url); if (response.ok) { const html = await response.text(); const $ = cheerio.load(html); - // highlight-next-line - $(".product-item").each((i, element) => { - // highlight-next-line + // highlight-start + for (const element of $(".product-item").toArray()) { console.log($(element).text()); - // highlight-next-line - }); + } + // highlight-end } else { throw new Error(`HTTP ${response.status}`); } ``` -We're using [`each()`](https://cheerio.js.org/docs/api/classes/Cheerio#each) to loop over the items in the Cheerio container. It calls the given function for each of the elements, with two arguments. The first is an index (0, 1, 2…), and the second is the element being processed. +Calling [`toArray()`](https://cheerio.js.org/docs/api/classes/Cheerio#toarray) converts the Cheerio selection to a standard JavaScript array. We can then loop over that array and process each selected element. -Cheerio requires us to wrap the element with `$()` again before we can work with it further, and then we call `.text()`. If we run the code, it… well, it definitely prints _something_… +Cheerio requires us to wrap each element with `$()` again before we can work with it further, and then we call `.text()`. 
If we run the code, it… well, it definitely prints _something_…
 
 ```text
 $ node index.js
@@ -79,7 +78,7 @@ if (response.ok) {
   const html = await response.text();
   const $ = cheerio.load(html);
 
-  $(".product-item").each((i, element) => {
+  for (const element of $(".product-item").toArray()) {
     const $productItem = $(element);
 
     const $title = $productItem.find(".product-item__title");
@@ -89,7 +88,7 @@ if (response.ok) {
     const price = $price.text();
 
     console.log(`${title} | ${price}`);
-  });
+  }
 } else {
   throw new Error(`HTTP ${response.status}`);
 }
@@ -175,7 +174,7 @@ if (response.ok) {
   const html = await response.text();
   const $ = cheerio.load(html);
 
-  $(".product-item").each((i, element) => {
+  for (const element of $(".product-item").toArray()) {
     const $productItem = $(element);
 
     const $title = $productItem.find(".product-item__title");
@@ -186,7 +185,7 @@ if (response.ok) {
     const price = $price.text();
 
     console.log(`${title} | ${price}`);
-  });
+  }
 } else {
   throw new Error(`HTTP ${response.status}`);
 }
@@ -248,11 +247,11 @@ Djibouti
   const html = await response.text();
   const $ = cheerio.load(html);
 
-  $(".wikitable").each((i, tableElement) => {
+  for (const tableElement of $(".wikitable").toArray()) {
     const $table = $(tableElement);
     const $rows = $table.find("tr");
 
-    $rows.each((j, rowElement) => {
+    for (const rowElement of $rows.toArray()) {
       const $row = $(rowElement);
       const $cells = $row.find("td");
 
@@ -261,12 +260,11 @@ Djibouti
       const $link = $thirdColumn.find("a").first();
       console.log($link.text());
     }
-    });
-  });
+    }
+  }
 } else {
   throw new Error(`HTTP ${response.status}`);
 }
-
 ```
 
-Because some rows contain [table headers](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/th), we skip processing a row if `table_row.select("td")` doesn't find any [table data](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/td) cells.
+Because some rows contain [table headers](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/th), we skip processing a row if `$row.find("td")` doesn't find any [table data](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/td) cells.
@@ -293,11 +291,11 @@ Simplify the code from previous exercise. 
Use a single for loop and a single CSS const html = await response.text(); const $ = cheerio.load(html); - $(".wikitable tr td:nth-child(3)").each((i, element) => { + for (const element of $(".wikitable tr td:nth-child(3)").toArray()) { const $nameCell = $(element); const $link = $nameCell.find("a").first(); console.log($link.text()); - }); + } } else { throw new Error(`HTTP ${response.status}`); } @@ -335,9 +333,9 @@ Max Verstappen wins Canadian Grand Prix: F1 – as it happened const html = await response.text(); const $ = cheerio.load(html); - $("#maincontent ul li h3").each((i, element) => { + for (const element of $("#maincontent ul li h3").toArray()) { console.log($(element).text()); - }); + } } else { throw new Error(`HTTP ${response.status}`); } diff --git a/sources/academy/webscraping/scraping_basics_javascript2/07_extracting_data.md b/sources/academy/webscraping/scraping_basics_javascript2/07_extracting_data.md index d393e41185..7ca821eef8 100644 --- a/sources/academy/webscraping/scraping_basics_javascript2/07_extracting_data.md +++ b/sources/academy/webscraping/scraping_basics_javascript2/07_extracting_data.md @@ -70,7 +70,7 @@ if (response.ok) { const html = await response.text(); const $ = cheerio.load(html); - $(".product-item").each((i, element) => { + for (const element of $(".product-item").toArray()) { const $productItem = $(element); const $title = $productItem.find(".product-item__title"); @@ -87,7 +87,7 @@ if (response.ok) { } console.log(`${title} | ${priceRange.minPrice} | ${priceRange.price}`); - }); + } } else { throw new Error(`HTTP ${response.status}`); } @@ -177,7 +177,7 @@ if (response.ok) { const html = await response.text(); const $ = cheerio.load(html); - $(".product-item").each((i, element) => { + for (const element of $(".product-item").toArray()) { const $productItem = $(element); const $title = $productItem.find(".product-item__title"); @@ -200,7 +200,7 @@ if (response.ok) { } console.log(`${title} | ${priceRange.minPrice} | ${priceRange.price}`); - }); + } } else { throw new Error(`HTTP ${response.status}`); } @@ -258,7 +258,7 @@ Denon AH-C720 In-Ear Headphones | 236 const html = await response.text(); const $ = cheerio.load(html); - $(".product-item").each((i, element) => { + for (const element of $(".product-item").toArray()) { const $productItem = $(element); const title = $productItem.find(".product-item__title"); @@ -268,7 +268,7 @@ Denon AH-C720 In-Ear Headphones | 236 const unitsCount = parseUnitsText(unitsText); console.log(`${title} | ${unitsCount}`); - }); + } } else { throw new Error(`HTTP ${response.status}`); } @@ -307,7 +307,7 @@ Simplify the code from previous exercise. Use [regular expressions](https://deve const html = await response.text(); const $ = cheerio.load(html); - $(".product-item").each((i, element) => { + for (const element of $(".product-item").toArray()) { const $productItem = $(element); const $title = $productItem.find(".product-item__title"); @@ -317,7 +317,7 @@ Simplify the code from previous exercise. 
Use [regular expressions](https://deve const unitsCount = parseUnitsText(unitsText); console.log(`${title} | ${unitsCount}`); - }); + } } else { throw new Error(`HTTP ${response.status}`); } @@ -369,7 +369,7 @@ Hints: const html = await response.text(); const $ = cheerio.load(html); - $("#maincontent ul li").each((i, element) => { + for (const element of $("#maincontent ul li").toArray()) { const $article = $(element); const title = $article @@ -383,7 +383,7 @@ Hints: const date = new Date(dateText); console.log(`${title} | ${date.toDateString()}`); - }); + } } else { throw new Error(`HTTP ${response.status}`); } diff --git a/sources/academy/webscraping/scraping_basics_javascript2/08_saving_data.md b/sources/academy/webscraping/scraping_basics_javascript2/08_saving_data.md index 8357fc2be3..6fb45d9dd8 100644 --- a/sources/academy/webscraping/scraping_basics_javascript2/08_saving_data.md +++ b/sources/academy/webscraping/scraping_basics_javascript2/08_saving_data.md @@ -38,7 +38,7 @@ if (response.ok) { const $ = cheerio.load(html); // highlight-next-line - const $items = $(".product-item").map((i, element) => { + const data = $(".product-item").toArray().map(element => { const $productItem = $(element); const $title = $productItem.find(".product-item__title"); @@ -64,15 +64,13 @@ if (response.ok) { return { title, ...priceRange }; }); // highlight-next-line - const data = $items.get(); - // highlight-next-line console.log(data); } else { throw new Error(`HTTP ${response.status}`); } ``` -Instead of printing each line, we now return the data for each product as a JavaScript object. We've replaced `.each()` with [`.map()`](https://cheerio.js.org/docs/api/classes/Cheerio#map-3), which also iterates over the selection but, in addition, collects all the results and returns them as a Cheerio collection. We then convert it into a standard JavaScript array by calling [`.get()`](https://cheerio.js.org/docs/api/classes/Cheerio#call-signature-32). Near the end of the program, we print the entire array. +Instead of printing each line, we now return the data for each product as a JavaScript object. We've replaced the `for` loop with [`.map()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/map), which also iterates over the selection but, in addition, collects all the results and returns them as another array. Near the end of the program, we print this entire array. 
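+
+If the switch from a `for...of` loop to `.map()` feels too abstract, here's a minimal sketch with a tiny made-up document (not the product listing) showing that both approaches build the same array:
+
+```js
+import * as cheerio from 'cheerio';
+
+const $ = cheerio.load("<ul><li>Sales</li><li>Headphones</li></ul>");
+
+// Collecting results by hand with a for...of loop and .push()…
+const withLoop = [];
+for (const element of $("li").toArray()) {
+  withLoop.push($(element).text());
+}
+
+// …does the same job as letting .map() collect them for us.
+const withMap = $("li").toArray().map(element => $(element).text());
+
+console.log(withLoop); // [ 'Sales', 'Headphones' ]
+console.log(withMap);  // [ 'Sales', 'Headphones' ]
+```
+
+Which style you prefer is mostly a matter of taste. In the previous lessons we used the loop because we only printed each result; here we need the results collected as data, so `.map()` is the better fit.
+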
:::tip Advanced syntax diff --git a/sources/academy/webscraping/scraping_basics_javascript2/09_getting_links.md b/sources/academy/webscraping/scraping_basics_javascript2/09_getting_links.md index db62d6394a..bf8e297145 100644 --- a/sources/academy/webscraping/scraping_basics_javascript2/09_getting_links.md +++ b/sources/academy/webscraping/scraping_basics_javascript2/09_getting_links.md @@ -43,7 +43,7 @@ if (response.ok) { const html = await response.text(); const $ = cheerio.load(html); - const $items = $(".product-item").map((i, element) => { + const data = $(".product-item").toArray().map(element => { const $productItem = $(element); const $title = $productItem.find(".product-item__title"); @@ -67,7 +67,6 @@ if (response.ok) { return { title, ...priceRange }; }); - const data = $items.get(); const jsonData = JSON.stringify(data); await writeFile('products.json', jsonData); @@ -190,12 +189,11 @@ async function exportCSV(data) { const listingURL = "https://warehouse-theme-metal.myshopify.com/collections/sales" const $ = await download(listingURL); -const $items = $(".product-item").map((i, element) => { +const data = $(".product-item").toArray().map(element => { const $productItem = $(element); const item = parseProduct($productItem); return item; }); -const data = $items.get(); await writeFile('products.json', exportJSON(data)); await writeFile('products.csv', await exportCSV(data)); @@ -286,13 +284,12 @@ Now we'll pass the base URL to the function in the main body of our program: const listingURL = "https://warehouse-theme-metal.myshopify.com/collections/sales" const $ = await download(listingURL); -const $items = $(".product-item").map((i, element) => { +const data = $(".product-item").toArray().map(element => { const $productItem = $(element); // highlight-next-line const item = parseProduct($productItem, listingURL); return item; }); -const data = $items.get(); ``` When we run the scraper now, we should see full URLs in our exports: @@ -353,12 +350,12 @@ https://en.wikipedia.org/wiki/Botswana const html = await response.text(); const $ = cheerio.load(html); - $(".wikitable tr td:nth-child(3)").each((i, element) => { + for (const element of $(".wikitable tr td:nth-child(3)").toArray()) { const nameCell = $(element); const link = nameCell.find("a").first(); const url = new URL(link.attr("href"), listingURL).href; console.log(url); - }); + } } else { throw new Error(`HTTP ${response.status}`); } @@ -397,11 +394,11 @@ https://www.theguardian.com/sport/article/2024/sep/02/max-verstappen-damns-his-u const html = await response.text(); const $ = cheerio.load(html); - $("#maincontent ul li").each((i, element) => { + for (const element of $("#maincontent ul li").toArray()) { const link = $(element).find("a").first(); const url = new URL(link.attr("href"), listingURL).href; console.log(url); - }); + } } else { throw new Error(`HTTP ${response.status}`); } diff --git a/sources/academy/webscraping/scraping_basics_javascript2/10_crawling.md b/sources/academy/webscraping/scraping_basics_javascript2/10_crawling.md index 10546ab2f2..513873f98a 100644 --- a/sources/academy/webscraping/scraping_basics_javascript2/10_crawling.md +++ b/sources/academy/webscraping/scraping_basics_javascript2/10_crawling.md @@ -67,13 +67,12 @@ async function exportCSV(data) { const listingURL = "https://warehouse-theme-metal.myshopify.com/collections/sales" const $ = await download(listingURL); -const $items = $(".product-item").map((i, element) => { +const data = $(".product-item").toArray().map(element => { const 
$productItem = $(element); // highlight-next-line const item = parseProduct($productItem, listingURL); return item; }); -const data = $items.get(); await writeFile('products.json', exportJSON(data)); await writeFile('products.csv', await exportCSV(data)); @@ -131,20 +130,20 @@ But where do we put this line in our program? In the `.map()` loop, we're already going through all the products. Let's expand it to include downloading the product detail page, parsing it, extracting the vendor's name, and adding it to the item object. -First, we need to make the loop asynchronous so that we can use `await download()` for each product. We'll add the `async` keyword to the inner function and rename the collection to `$promises`, since it will now store promises that resolve to items rather than the items themselves. We'll still convert the collection to a standard JavaScript array, but this time we'll pass it to `await Promise.all()` to resolve all the promises and retrieve the actual items. +First, we need to make the loop asynchronous so that we can use `await download()` for each product. We'll add the `async` keyword to the inner function and rename the collection to `promises`, since it will now store promises that resolve to items rather than the items themselves. We'll pass it to `await Promise.all()` to resolve all the promises and retrieve the actual items. ```js const listingURL = "https://warehouse-theme-metal.myshopify.com/collections/sales" const $ = await download(listingURL); // highlight-next-line -const $promises = $(".product-item").map(async (i, element) => { +const promises = $(".product-item").toArray().map(async element => { const $productItem = $(element); const item = parseProduct($productItem, listingURL); return item; }); // highlight-next-line -const data = await Promise.all($promises.get()); +const data = await Promise.all(promises); ``` The program behaves the same as before, but now the code is prepared to make HTTP requests from within the inner function. 
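+
+If the combination of an `async` callback and `Promise.all()` is new to you, here's a tiny self-contained sketch of what's going on. It uses a made-up `fakeDownload()` helper instead of our real `download()` function:
+
+```js
+// Hypothetical stand-in for download(): resolves to a fake "page" string
+const fakeDownload = async (url) => `<h1>Page at ${url}</h1>`;
+
+const urls = ["https://example.com/a", "https://example.com/b"];
+
+// An async callback always returns a promise, so .map() gives us
+// an array of promises rather than an array of values…
+const promises = urls.map(async (url) => {
+  const page = await fakeDownload(url);
+  return { url, length: page.length };
+});
+
+// …and Promise.all() waits for all of them and hands back the resolved values.
+const data = await Promise.all(promises);
+console.log(data);
+```
+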
Let's do it: @@ -153,7 +152,7 @@ The program behaves the same as before, but now the code is prepared to make HTT const listingURL = "https://warehouse-theme-metal.myshopify.com/collections/sales" const $ = await download(listingURL); -const $promises = $(".product-item").map(async (i, element) => { +const promises = $(".product-item").toArray().map(async element => { const $productItem = $(element); const item = parseProduct($productItem, listingURL); @@ -248,7 +247,8 @@ Hint: Locating cells in tables is sometimes easier if you know how to [filter](h const listingURL = "https://en.wikipedia.org/wiki/List_of_sovereign_states_and_dependent_territories_in_Africa"; const $ = await download(listingURL); - const $promises = $(".wikitable tr td:nth-child(3)").map(async (i, element) => { + const $cells = $(".wikitable tr td:nth-child(3)"); + const promises = $cells.toArray().map(async element => { const $nameCell = $(element); const $link = $nameCell.find("a").first(); const countryURL = new URL($link.attr("href"), listingURL).href; @@ -266,7 +266,7 @@ Hint: Locating cells in tables is sometimes easier if you know how to [filter](h console.log(`${countryURL} ${callingCode || null}`); }); - await Promise.all($promises.get()); + await Promise.all(promises); ``` @@ -314,7 +314,7 @@ Hints: const listingURL = "https://www.theguardian.com/sport/formulaone"; const $ = await download(listingURL); - const $promises = $("#maincontent ul li").map(async (i, element) => { + const promises = $("#maincontent ul li").toArray().map(async element => { const $item = $(element); const $link = $item.find("a").first(); const authorURL = new URL($link.attr("href"), listingURL).href; @@ -327,7 +327,7 @@ Hints: console.log(`${author || address || null}: ${title}`); }); - await Promise.all($promises.get()); + await Promise.all(promises); ``` diff --git a/sources/academy/webscraping/scraping_basics_javascript2/11_scraping_variants.md b/sources/academy/webscraping/scraping_basics_javascript2/11_scraping_variants.md index 48a64b389d..635205488d 100644 --- a/sources/academy/webscraping/scraping_basics_javascript2/11_scraping_variants.md +++ b/sources/academy/webscraping/scraping_basics_javascript2/11_scraping_variants.md @@ -75,7 +75,7 @@ Using our knowledge of Cheerio, we can locate the `option` elements and extract const listingURL = "https://warehouse-theme-metal.myshopify.com/collections/sales"; const $ = await download(listingURL); -const $promises = $(".product-item").map(async (i, element) => { +const promises = $(".product-item").toArray().map(async element => { const $productItem = $(element); const item = parseProduct($productItem, listingURL); @@ -83,8 +83,9 @@ const $promises = $(".product-item").map(async (i, element) => { item.vendor = $p(".product-meta__vendor").text().trim(); // highlight-start - const $items = $p(".product-form__option.no-js option").map((j, element) => { - const $option = $(element); + const $options = $p(".product-form__option.no-js option"); + const items = $options.toArray().map(optionElement => { + const $option = $(optionElement); const variantName = $option.text().trim(); return { variantName, ...item }; }); @@ -92,12 +93,12 @@ const $promises = $(".product-item").map(async (i, element) => { return item; }); -const data = await Promise.all($promises.get()); +const data = await Promise.all(promises); ``` The CSS selector `.product-form__option.no-js` targets elements that have both the `product-form__option` and `no-js` classes. 
We then use the [descendant combinator](https://developer.mozilla.org/en-US/docs/Web/CSS/Descendant_combinator) to match all `option` elements nested within the `.product-form__option.no-js` wrapper. -We loop over the variants using Cheerio's `.map()` method to create a collection of item copies for each `variantName`. We now need to pass all these items onward, but the function currently returns just one item per product. And what if there are no variants? +We loop over the variants using `.map()` method to create an array of item copies for each `variantName`. We now need to pass all these items onward, but the function currently returns just one item per product. And what if there are no variants? Let's adjust the loop so it returns a promise that resolves to an array of items instead of a single item. If a product has no variants, we'll return an array with a single item, setting `variantName` to `null`: @@ -105,28 +106,24 @@ Let's adjust the loop so it returns a promise that resolves to an array of items const listingURL = "https://warehouse-theme-metal.myshopify.com/collections/sales" const $ = await download(listingURL); -const $promises = $(".product-item").map(async (i, element) => { +const promises = $(".product-item").toArray().map(async element => { const $productItem = $(element); const item = parseProduct($productItem, listingURL); const $p = await download(item.url); item.vendor = $p(".product-meta__vendor").text().trim(); - const $items = $p(".product-form__option.no-js option").map((j, element) => { - const $option = $(element); + const $options = $p(".product-form__option.no-js option"); + const items = $options.toArray().map(optionElement => { + const $option = $(optionElement); const variantName = $option.text().trim(); return { variantName, ...item }; }); - - // highlight-start - if ($items.length > 0) { - return $items.get(); - } - return [{ variantName: null, ...item }]; - // highlight-end + // highlight-next-line + return items.length > 0 ? items : [{ variantName: null, ...item }]; }); // highlight-start -const itemLists = await Promise.all($promises.get()); +const itemLists = await Promise.all(promises); const data = itemLists.flat(); // highlight-end ``` @@ -288,26 +285,23 @@ function parseVariant($option) { const listingURL = "https://warehouse-theme-metal.myshopify.com/collections/sales" const $ = await download(listingURL); -const $promises = $(".product-item").map(async (i, element) => { +const promises = $(".product-item").toArray().map(async element => { const $productItem = $(element); const item = parseProduct($productItem, listingURL); const $p = await download(item.url); item.vendor = $p(".product-meta__vendor").text().trim(); - const $items = $p(".product-form__option.no-js option").map((j, element) => { + const $options = $p(".product-form__option.no-js option"); + const items = $options.toArray().map(optionElement => { // highlight-next-line - const variant = parseVariant($(element)); + const variant = parseVariant($(optionElement)); // highlight-next-line return { ...item, ...variant }; }); - - if ($items.length > 0) { - return $items.get(); - } - return [{ variantName: null, ...item }]; + return items.length > 0 ? 
items : [{ variantName: null, ...item }]; }); -const itemLists = await Promise.all($promises.get()); +const itemLists = await Promise.all(promises); const data = itemLists.flat(); await writeFile('products.json', await exportJSON(data)); @@ -406,7 +400,7 @@ Your output should look something like this: const listingURL = "https://www.npmjs.com/search?page=0&q=keywords%3Allm&sortBy=dependent_count"; const $ = await download(listingURL); - const $promises = $("section").map(async (i, element) => { + const promises = $("section").toArray().map(async element => { const $card = $(element); const details = $card @@ -442,7 +436,7 @@ Your output should look something like this: return { name, url, description, dependents, downloads }; }); - const data = await Promise.all($promises.get()); + const data = await Promise.all(promises); console.log(data.filter(item => item !== null).splice(0, 5)); ``` @@ -483,7 +477,7 @@ At the time of writing, the shortest article on the CNN Sports homepage is [abou const listingURL = "https://edition.cnn.com/sport"; const $ = await download(listingURL); - const $promises = $(".layout__main .card").map(async (i, element) => { + const promises = $(".layout__main .card").toArray().map(async element => { const $link = $(element).find("a").first(); const articleURL = new URL($link.attr("href"), listingURL).href; @@ -493,7 +487,7 @@ At the time of writing, the shortest article on the CNN Sports homepage is [abou return { url: articleURL, length: content.length }; }); - const data = await Promise.all($promises.get()); + const data = await Promise.all(promises); const nonZeroData = data.filter(({ url, length }) => length > 0); nonZeroData.sort((a, b) => a.length - b.length); const shortestItem = nonZeroData[0]; diff --git a/sources/academy/webscraping/scraping_basics_javascript2/12_framework.md b/sources/academy/webscraping/scraping_basics_javascript2/12_framework.md index 1f13a5a792..bfa205fb40 100644 --- a/sources/academy/webscraping/scraping_basics_javascript2/12_framework.md +++ b/sources/academy/webscraping/scraping_basics_javascript2/12_framework.md @@ -223,10 +223,10 @@ const crawler = new CheerioCrawler({ if ($variants.length === 0) { log.info("Item scraped", item); } else { - $variants.each((i, element) => { + for (const element of $variants.toArray()) { const variant = parseVariant($(element)); log.info("Item scraped", { ...item, ...variant }); - }); + } } // highlight-end } else { @@ -258,11 +258,11 @@ const crawler = new CheerioCrawler({ // highlight-next-line pushData(item); } else { - $variants.each((i, element) => { + for (const element of $variants.toArray()) { const variant = parseVariant($(element)); // highlight-next-line pushData({ ...item, ...variant }); - }); + } } } else { ... 
@@ -342,12 +342,12 @@ const crawler = new CheerioCrawler({
       log.info('Saving a product');
       pushData(item);
     } else {
-      $variants.each((i, element) => {
+      for (const element of $variants.toArray()) {
         const variant = parseVariant($(element));
         // highlight-next-line
         log.info('Saving a product variant');
         pushData({ ...item, ...variant });
-      });
+      }
     }
   } else {
     // highlight-next-line
@@ -425,15 +425,15 @@ Hints:
   async requestHandler({ $, request, enqueueLinks, pushData }) {
     if (request.label === 'DRIVER') {
       const info = {};
-      $('.common-driver-info li').each((i, listItem) => {
-        const name = $(listItem).find('span').text().trim();
-        const value = $(listItem).find('h4').text().trim();
+      for (const itemElement of $('.common-driver-info li').toArray()) {
+        const name = $(itemElement).find('span').text().trim();
+        const value = $(itemElement).find('h4').text().trim();
         info[name] = value;
-      });
+      }
       const detail = {};
-      $('.driver-detail--cta-group a').each((i, link) => {
-        const name = $(link).find('p').text().trim();
-        const value = $(link).find('h2').text().trim();
+      for (const linkElement of $('.driver-detail--cta-group a').toArray()) {
+        const name = $(linkElement).find('p').text().trim();
+        const value = $(linkElement).find('h2').text().trim();
         detail[name] = value;
-      });
+      }
       const [dobDay, dobMonth, dobYear] = info['DOB'].split("/");
@@ -527,8 +527,9 @@ When navigating to the first IMDb search result, you might find it helpful to kn
     } else if (request.label === 'NETFLIX') {
       // handle Netflix table
-      const $requests = $('[data-uia="top10-table-row-title"] button').map((i, nameButton) => {
-        const name = $(nameButton).text().trim();
+      const $buttons = $('[data-uia="top10-table-row-title"] button');
+      const requests = $buttons.toArray().map(buttonElement => {
+        const name = $(buttonElement).text().trim();
         const imdbSearchUrl = `https://www.imdb.com/find/?q=${escape(name)}&s=tt&ttype=ft`;
         return new Request({ url: imdbSearchUrl, label: 'IMDB_SEARCH' });
       });