-
Notifications
You must be signed in to change notification settings - Fork 2
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
feat: add markdownify and localscraper
- Loading branch information
Showing
14 changed files
with
672 additions
and
111 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,37 @@ | ||
import asyncio

from scrapegraph_py import AsyncClient
from scrapegraph_py.logger import sgai_logger

sgai_logger.set_logging(level="INFO")


async def main():
    """Convert several pages to markdown concurrently with AsyncClient."""
    # One shared async client for all requests.
    client = AsyncClient(api_key="your-api-key-here")

    target_urls = [
        "https://scrapegraphai.com/",
        "https://github.com/ScrapeGraphAI/Scrapegraph-ai",
    ]

    # Build one markdownify coroutine per URL.
    pending = [client.markdownify(website_url=u) for u in target_urls]

    # return_exceptions=True keeps a single failure from cancelling
    # the remaining requests — errors come back as values instead.
    results = await asyncio.gather(*pending, return_exceptions=True)

    for idx, outcome in enumerate(results):
        if isinstance(outcome, Exception):
            print(f"\nError for {target_urls[idx]}: {outcome}")
        else:
            print(f"\nPage {idx+1} Markdown:")
            print(f"URL: {target_urls[idx]}")
            print(f"Result: {outcome['result']}")

    # Release the client's underlying session.
    await client.close()


if __name__ == "__main__":
    asyncio.run(main())
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,31 @@ | ||
from scrapegraph_py import Client
from scrapegraph_py.logger import sgai_logger

sgai_logger.set_logging(level="INFO")

# Initialize the client
sgai_client = Client(api_key="your-api-key-here")

# Example HTML content — LocalScraper extracts from HTML you already
# have, so no page fetch is performed.
html_content = """
<html>
<body>
<h1>Company Name</h1>
<p>We are a technology company focused on AI solutions.</p>
<div class="contact">
<p>Email: contact@example.com</p>
<p>Phone: (555) 123-4567</p>
</div>
</body>
</html>
"""

# LocalScraper request: extract structured info from the local HTML.
response = sgai_client.localscraper(
    user_prompt="Extract the company description and contact information",
    website_html=html_content,
)

# Print the response
print(f"Request ID: {response['request_id']}")
print(f"Result: {response['result']}")

# Close the client to release its underlying HTTP session
# (mirrors the cleanup done in the async example).
sgai_client.close()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,16 @@ | ||
from scrapegraph_py import Client
from scrapegraph_py.logger import sgai_logger

sgai_logger.set_logging(level="INFO")

# Initialize the client
sgai_client = Client(api_key="your-api-key-here")

# Markdownify request: convert the target page to markdown.
response = sgai_client.markdownify(
    website_url="https://example.com",
)

# Print the response
print(f"Request ID: {response['request_id']}")
print(f"Result: {response['result']}")

# Close the client to release its underlying HTTP session
# (mirrors the cleanup done in the async example).
sgai_client.close()
Oops, something went wrong.