Skip to content

Commit b5e8905

Browse files
feat(api): api update
1 parent 04fafa1 commit b5e8905

5 files changed

Lines changed: 75 additions & 5 deletions

File tree

.stats.yml

Lines changed: 2 additions & 2 deletions
@@ -1,4 +1,4 @@
 configured_endpoints: 20
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/brand-dev%2Fbrand.dev-e7d0d9b555ca50f58e651f918238ee1329755f46cb962feaedec097be4c28d83.yml
-openapi_spec_hash: b187dde21c352dd66cf249fbc14dd0b4
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/brand-dev%2Fbrand.dev-4bc00d073aa6c38b3299c9fc2e197ba9e25df5defca2508a0a0c83bf08237b00.yml
+openapi_spec_hash: b68c73667402fd727825555413522ce6
 config_hash: 91cf2dcefb99c39eb9cd3e98e15d6011

src/brand/dev/resources/brand.py

Lines changed: 36 additions & 2 deletions
@@ -2376,6 +2376,7 @@ def web_scrape_html(
         self,
         *,
         url: str,
+        max_age_ms: int | Omit = omit,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
         extra_headers: Headers | None = None,
@@ -2389,6 +2390,10 @@ def web_scrape_html(
         Args:
           url: Full URL to scrape (must include http:// or https:// protocol)
 
+          max_age_ms: Return a cached result if a prior scrape for the same parameters exists and is
+              younger than this many milliseconds. Defaults to 1 day (86400000 ms) when
+              omitted. Set to 0 to always scrape fresh.
+
           extra_headers: Send extra headers
 
           extra_query: Add additional query parameters to the request
@@ -2404,7 +2409,13 @@ def web_scrape_html(
                 extra_query=extra_query,
                 extra_body=extra_body,
                 timeout=timeout,
-                query=maybe_transform({"url": url}, brand_web_scrape_html_params.BrandWebScrapeHTMLParams),
+                query=maybe_transform(
+                    {
+                        "url": url,
+                        "max_age_ms": max_age_ms,
+                    },
+                    brand_web_scrape_html_params.BrandWebScrapeHTMLParams,
+                ),
             ),
             cast_to=BrandWebScrapeHTMLResponse,
         )
@@ -2455,6 +2466,7 @@ def web_scrape_md(
         url: str,
         include_images: bool | Omit = omit,
         include_links: bool | Omit = omit,
+        max_age_ms: int | Omit = omit,
         shorten_base64_images: bool | Omit = omit,
         use_main_content_only: bool | Omit = omit,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -2476,6 +2488,10 @@ def web_scrape_md(
 
           include_links: Preserve hyperlinks in Markdown output
 
+          max_age_ms: Return a cached result if a prior scrape for the same parameters exists and is
+              younger than this many milliseconds. Defaults to 1 day (86400000 ms) when
+              omitted. Set to 0 to always scrape fresh.
+
           shorten_base64_images: Shorten base64-encoded image data in the Markdown output
 
           use_main_content_only: Extract only the main content of the page, excluding headers, footers, sidebars,
@@ -2501,6 +2517,7 @@ def web_scrape_md(
                         "url": url,
                         "include_images": include_images,
                         "include_links": include_links,
+                        "max_age_ms": max_age_ms,
                         "shorten_base64_images": shorten_base64_images,
                         "use_main_content_only": use_main_content_only,
                     },
@@ -4873,6 +4890,7 @@ async def web_scrape_html(
         self,
         *,
         url: str,
+        max_age_ms: int | Omit = omit,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
         extra_headers: Headers | None = None,
@@ -4886,6 +4904,10 @@ async def web_scrape_html(
         Args:
           url: Full URL to scrape (must include http:// or https:// protocol)
 
+          max_age_ms: Return a cached result if a prior scrape for the same parameters exists and is
+              younger than this many milliseconds. Defaults to 1 day (86400000 ms) when
+              omitted. Set to 0 to always scrape fresh.
+
           extra_headers: Send extra headers
 
           extra_query: Add additional query parameters to the request
@@ -4901,7 +4923,13 @@ async def web_scrape_html(
                 extra_query=extra_query,
                 extra_body=extra_body,
                 timeout=timeout,
-                query=await async_maybe_transform({"url": url}, brand_web_scrape_html_params.BrandWebScrapeHTMLParams),
+                query=await async_maybe_transform(
+                    {
+                        "url": url,
+                        "max_age_ms": max_age_ms,
+                    },
+                    brand_web_scrape_html_params.BrandWebScrapeHTMLParams,
+                ),
             ),
             cast_to=BrandWebScrapeHTMLResponse,
         )
@@ -4954,6 +4982,7 @@ async def web_scrape_md(
         url: str,
         include_images: bool | Omit = omit,
         include_links: bool | Omit = omit,
+        max_age_ms: int | Omit = omit,
         shorten_base64_images: bool | Omit = omit,
         use_main_content_only: bool | Omit = omit,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -4975,6 +5004,10 @@ async def web_scrape_md(
 
           include_links: Preserve hyperlinks in Markdown output
 
+          max_age_ms: Return a cached result if a prior scrape for the same parameters exists and is
+              younger than this many milliseconds. Defaults to 1 day (86400000 ms) when
+              omitted. Set to 0 to always scrape fresh.
+
           shorten_base64_images: Shorten base64-encoded image data in the Markdown output
 
           use_main_content_only: Extract only the main content of the page, excluding headers, footers, sidebars,
@@ -5000,6 +5033,7 @@ async def web_scrape_md(
                         "url": url,
                         "include_images": include_images,
                         "include_links": include_links,
+                        "max_age_ms": max_age_ms,
                         "shorten_base64_images": shorten_base64_images,
                         "use_main_content_only": use_main_content_only,
                     },

src/brand/dev/types/brand_web_scrape_html_params.py

Lines changed: 10 additions & 1 deletion

@@ -2,11 +2,20 @@
 
 from __future__ import annotations
 
-from typing_extensions import Required, TypedDict
+from typing_extensions import Required, Annotated, TypedDict
+
+from .._utils import PropertyInfo
 
 __all__ = ["BrandWebScrapeHTMLParams"]
 
 
 class BrandWebScrapeHTMLParams(TypedDict, total=False):
     url: Required[str]
     """Full URL to scrape (must include http:// or https:// protocol)"""
+
+    max_age_ms: Annotated[int, PropertyInfo(alias="maxAgeMs")]
+    """
+    Return a cached result if a prior scrape for the same parameters exists and is
+    younger than this many milliseconds. Defaults to 1 day (86400000 ms) when
+    omitted. Set to 0 to always scrape fresh.
+    """

src/brand/dev/types/brand_web_scrape_md_params.py

Lines changed: 7 additions & 0 deletions

@@ -22,6 +22,13 @@ class BrandWebScrapeMdParams(TypedDict, total=False):
     include_links: Annotated[bool, PropertyInfo(alias="includeLinks")]
     """Preserve hyperlinks in Markdown output"""
 
+    max_age_ms: Annotated[int, PropertyInfo(alias="maxAgeMs")]
+    """
+    Return a cached result if a prior scrape for the same parameters exists and is
+    younger than this many milliseconds. Defaults to 1 day (86400000 ms) when
+    omitted. Set to 0 to always scrape fresh.
+    """
+
     shorten_base64_images: Annotated[bool, PropertyInfo(alias="shortenBase64Images")]
     """Shorten base64-encoded image data in the Markdown output"""

tests/api_resources/test_brand.py

Lines changed: 20 additions & 0 deletions

@@ -840,6 +840,15 @@ def test_method_web_scrape_html(self, client: BrandDev) -> None:
         )
         assert_matches_type(BrandWebScrapeHTMLResponse, brand, path=["response"])
 
+    @pytest.mark.skip(reason="Mock server tests are disabled")
+    @parametrize
+    def test_method_web_scrape_html_with_all_params(self, client: BrandDev) -> None:
+        brand = client.brand.web_scrape_html(
+            url="https://example.com",
+            max_age_ms=0,
+        )
+        assert_matches_type(BrandWebScrapeHTMLResponse, brand, path=["response"])
+
     @pytest.mark.skip(reason="Mock server tests are disabled")
     @parametrize
     def test_raw_response_web_scrape_html(self, client: BrandDev) -> None:
@@ -915,6 +924,7 @@ def test_method_web_scrape_md_with_all_params(self, client: BrandDev) -> None:
             url="https://example.com",
             include_images=True,
             include_links=True,
+            max_age_ms=0,
             shorten_base64_images=True,
             use_main_content_only=True,
         )
@@ -1797,6 +1807,15 @@ async def test_method_web_scrape_html(self, async_client: AsyncBrandDev) -> None
         )
         assert_matches_type(BrandWebScrapeHTMLResponse, brand, path=["response"])
 
+    @pytest.mark.skip(reason="Mock server tests are disabled")
+    @parametrize
+    async def test_method_web_scrape_html_with_all_params(self, async_client: AsyncBrandDev) -> None:
+        brand = await async_client.brand.web_scrape_html(
+            url="https://example.com",
+            max_age_ms=0,
+        )
+        assert_matches_type(BrandWebScrapeHTMLResponse, brand, path=["response"])
+
     @pytest.mark.skip(reason="Mock server tests are disabled")
     @parametrize
     async def test_raw_response_web_scrape_html(self, async_client: AsyncBrandDev) -> None:
@@ -1872,6 +1891,7 @@ async def test_method_web_scrape_md_with_all_params(self, async_client: AsyncBra
             url="https://example.com",
             include_images=True,
             include_links=True,
+            max_age_ms=0,
             shorten_base64_images=True,
             use_main_content_only=True,
         )

0 commit comments

Comments (0)