@feliche93
Created August 28, 2023 09:10
FastAPI to TypeScript Fetch with openapi-fetch
import createClient from "openapi-fetch";
import { paths } from "./v1";

// Typed client for the FastAPI backend: method, path, request body and response
// types all come from the generated `paths` interface in ./v1.
// Requests are sent relative to the "/fast-api" base path.
export const { GET, POST } = createClient<paths>({ baseUrl: "/fast-api" });
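The relative baseUrl "/fast-api" suggests the Next.js app proxies those requests on to the FastAPI server. A minimal sketch of such a rewrite, assuming a next.config.js and a FastAPI server on localhost:8000 (both the file and the destination URL are assumptions, not part of the original gist):

// next.config.js — hypothetical rewrite so /fast-api/* reaches the FastAPI server
module.exports = {
  async rewrites() {
    return [
      {
        // e.g. /fast-api/scrape-website -> http://localhost:8000/scrape-website
        source: '/fast-api/:path*',
        destination: 'http://localhost:8000/:path*', // assumed FastAPI address
      },
    ]
  },
}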
'use client'

import { POST } from '@lib/fast-api/client'
import { useState } from 'react'
import { toast } from 'react-hot-toast'

export default function TestPage() {
  const [websiteData, setWebsiteData] = useState<object | null>(null)

  const test = async () => {
    // Keep the toast id so the loading toast is replaced instead of lingering.
    const toastId = toast.loading('Scraping...')
    const { data, error } = await POST('/scrape-website', {
      body: {
        url: 'https://www.felixvemmer.com',
      },
    })
    if (error) {
      toast.error('Error scraping website', { id: toastId })
      return
    }
    toast.success('Scraped website', { id: toastId })
    setWebsiteData(data)
    console.log(data)
  }

  return (
    <>
      <button
        onClick={() => {
          test()
        }}
      >
        Test Scraping
      </button>
      <pre>{JSON.stringify(websiteData, null, 2)}</pre>
    </>
  )
}
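Because the POST call is typed from the generated paths, `data` is inferred as WebsiteContentOutputSchema and `error` as HTTPValidationError, so messages from a 422 response can be read without casting. A small sketch under that assumption (the helper name is hypothetical, the field access follows the generated schema below):

import { POST } from '@lib/fast-api/client'

// Hypothetical helper showing how the typed 422 body (HTTPValidationError) can be inspected.
export async function scrapeWithErrorDetails(url: string) {
  const { data, error } = await POST('/scrape-website', { body: { url } })
  if (error) {
    // `error.detail` is ValidationError[] | undefined per the generated schema
    console.error(error.detail?.map((d) => d.msg).join(', ') ?? 'Request failed')
    return null
  }
  return data // typed as WebsiteContentOutputSchema
}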
/**
 * This file was auto-generated by openapi-typescript.
 * Do not make direct changes to the file.
 */

export interface paths {
  "/scrape-website": {
    /**
     * Scrape Website
     * @description This function scrapes the website content based on the provided URL.
     *
     * Args:
     *     request (ScrapeWebsiteRequest): The request object containing
     *         the URL of the website to be scraped.
     *
     * Returns:
     *     WebsiteContentOutputSchema: The response object containing the scraped website content.
     */
    post: operations["scrape_website_scrape_website_post"];
  };
}

export type webhooks = Record<string, never>;

export interface components {
  schemas: {
    /** HTTPValidationError */
    HTTPValidationError: {
      /** Detail */
      detail?: components["schemas"]["ValidationError"][];
    };
    /**
     * ScrapeWebsiteRequest
     * @description A Pydantic model representing the request schema for scraping a website.
     */
    ScrapeWebsiteRequest: {
      /**
       * Url
       * @description The URL of the website to be scraped.
       */
      url: string;
      /**
       * Keyword
       * @description The keyword to be searched on the website.
       */
      keyword?: string;
    };
    /** ValidationError */
    ValidationError: {
      /** Location */
      loc: (string | number)[];
      /** Message */
      msg: string;
      /** Error Type */
      type: string;
    };
    /**
     * WebsiteContentOutputSchema
     * @description A Pydantic model representing the output schema for website content.
     */
    WebsiteContentOutputSchema: {
      /**
       * Bodytext
       * @description The body text of the website.
       */
      bodyText: string;
      /**
       * Pagetitle
       * @description The title of the webpage.
       */
      pageTitle: string;
      /**
       * Metatitle
       * @description The meta title of the webpage.
       */
      metaTitle?: string;
      /**
       * Metadescription
       * @description The meta description of the webpage.
       */
      metaDescription?: string;
      /**
       * Metaimageurl
       * @description The meta image URL of the webpage.
       */
      metaImageUrl?: string;
      /**
       * Faviconimageurl
       * @description The favicon image URL of the webpage.
       */
      faviconImageUrl?: string;
      /**
       * Url
       * @description The URL of the webpage.
       */
      url: string;
      /**
       * Keyword
       * @description The keyword to be searched on the website.
       */
      keyword?: string;
    };
  };
  responses: never;
  parameters: never;
  requestBodies: never;
  headers: never;
  pathItems: never;
}

export type $defs = Record<string, never>;

export type external = Record<string, never>;

export interface operations {
  /**
   * Scrape Website
   * @description This function scrapes the website content based on the provided URL.
   *
   * Args:
   *     request (ScrapeWebsiteRequest): The request object containing
   *         the URL of the website to be scraped.
   *
   * Returns:
   *     WebsiteContentOutputSchema: The response object containing the scraped website content.
   */
  scrape_website_scrape_website_post: {
    requestBody: {
      content: {
        "application/json": components["schemas"]["ScrapeWebsiteRequest"];
      };
    };
    responses: {
      /** @description Successful Response */
      200: {
        content: {
          "application/json": components["schemas"]["WebsiteContentOutputSchema"];
        };
      };
      /** @description Validation Error */
      422: {
        content: {
          "application/json": components["schemas"]["HTTPValidationError"];
        };
      };
    };
  };
}
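The types file above is regenerated whenever the FastAPI schema changes, typically by pointing the openapi-typescript CLI at FastAPI's /openapi.json endpoint, for example (the server URL and output path here are assumptions):

npx openapi-typescript http://localhost:8000/openapi.json -o ./v1.ts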