Ping the Web

A simple endpoint to ping servers (OK/KO).

Pricing: Pay per usage
Last modified: 2 years ago
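To try the Actor from your own code, you can call it through the Apify API. Below is a minimal sketch using the official apify-client package; the Actor ID placeholder and the token are assumptions you would substitute with your own values.

from apify_client import ApifyClient

client = ApifyClient('<YOUR_APIFY_TOKEN>')

# Start the Actor and wait for it to finish. The input matches
# .actor/input_schema.json below.
run = client.actor('<username>/ping-the-web').call(run_input={
    'url': 'www.google.com',
    'attempts': 5,
})

# The run pushes a single {'result': 'OK'} or {'result': 'KO'} record
# to its default dataset.
for item in client.dataset(run['defaultDatasetId']).iterate_items():
    print(item['result'])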
Dockerfile

# First, specify the base Docker image.
# You can see the Docker images from Apify at https://hub.docker.com/r/apify/.
# You can also use any other image from Docker Hub.
FROM apify/actor-python:3.11

# Second, copy just requirements.txt into the actor image,
# since it should be the only file that affects the dependency install in the next step,
# in order to speed up the build.
COPY requirements.txt ./

# Install the packages specified in requirements.txt,
# print the installed Python version, pip version
# and all installed packages with their versions for debugging.
RUN echo "Python version:" \
 && python --version \
 && echo "Pip version:" \
 && pip --version \
 && echo "Installing dependencies:" \
 && pip install -r requirements.txt \
 && echo "All installed Python packages:" \
 && pip freeze

# Next, copy the remaining files and directories with the source code.
# Since we do this after installing the dependencies, quick builds will be really fast
# for most source file changes.
COPY . ./

# Specify how to launch the source code of your actor.
# By default, the "python3 -m src" command is run.
CMD ["python3", "-m", "src"]
{ "actorSpecification": 1, "name": "ping-the-web", "title": "Ping the Web", "description": "Ping the Web.", "version": "1.0", "meta": { "templateId": "python-beautifulsoup" }, "input": "./input_schema.json", "dockerfile": "./Dockerfile"}
{ "title": "Ping the Web", "type": "object", "schemaVersion": 1, "properties": { "url": { "title": "Target server", "type": "string", "description": "The website you want to test. Do not include the protocol.", "prefill": "www.google.com", "editor": "textfield" }, "attempts": { "title": "Number of attempts", "type": "integer", "description": "Ping attempts.", "default": 5, "minimum": 2, "maximum": 25 }, "proxy": { "title": "Proxy configuration", "type": "object", "description": "Select proxies to be used by your crawler.", "prefill": { "useApifyProxy": true }, "editor": "proxy" } }, "required": ["url", "attempts"]}
src/main.py

from apify import Actor
from icmplib import ping


def host_up(hostname: str, attempts: int) -> bool:
    # Ping the host a few times and report whether it replied.
    host = ping(hostname, count=attempts, interval=0.2)
    print(host)
    return host.is_alive


async def main():
    async with Actor:
        actor_input = await Actor.get_input() or {}
        # Structure of input is defined in .actor/input_schema.json
        host = actor_input.get('url')
        attempts = actor_input.get('attempts')
        response = "KO"

        try:
            if host_up(host, attempts):
                response = "OK"
                print(f"{host} - Connection successful!")
        except Exception:
            # Catching Exception instead of a bare except keeps the OK/KO
            # behaviour without also swallowing KeyboardInterrupt/SystemExit.
            print(f"{host} - Connection failed.")
            response = "KO"

        await Actor.push_data([
            {
                'result': response
            },
        ])
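The Dockerfile's CMD runs python3 -m src, so the package also needs a src/__main__.py that starts the coroutine above. The Apify Python template generates one along these lines (a sketch; the generated file may differ in details such as logging setup):

import asyncio

from .main import main

# Entry point executed by "python3 -m src": run the async main() to completion.
asyncio.run(main())

One note on the ping itself: icmplib opens a raw ICMP socket by default, which requires root or the CAP_NET_RAW capability. If the runtime refuses that, icmplib's documented fallback is to pass privileged=False to ping() so it uses an unprivileged datagram socket instead.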
.dockerignore

# configurations
.idea

# crawlee and apify storage folders
apify_storage
crawlee_storage
storage

# installed files
.venv

# git folder
.git
.editorconfig

root = true

[*]
indent_style = space
indent_size = 4
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
end_of_line = lf
.gitignore

# This file tells Git which files shouldn't be added to source control

.idea
.DS_Store

apify_storage
storage

.venv/
.env/
__pypackages__
dist/
build/
*.egg-info/
*.egg

__pycache__

.mypy_cache
.dmypy.json
dmypy.json
.pytest_cache

.scrapy
*.log
requirements.txt

# Add your dependencies here.
# See https://pip.pypa.io/en/latest/reference/requirements-file-format/
# for how to format them
apify ~= 1.1.1
icmplib