{
"name": "spatie/robots-txt",
"description": "Determine if a page may be crawled from robots.txt and robots meta tags",
"keywords": [
"spatie",
"robots-txt"
],
"homepage": "https://github.com/spatie/robots-txt",
"license": "MIT",
"authors": [
{
"name": "Brent Roose",
"email": "brent@spatie.be",
"homepage": "https://spatie.be",
"role": "Developer"
}
],
"require": {
"php": "^7.3 || ^8.0"
},
"require-dev": {
"larapack/dd": "^1.0",
"phpunit/phpunit": "^8.0 || ^9.0"
},
"autoload": {
"psr-4": {
"Spatie\\Robots\\": "src"
}
},
"autoload-dev": {
"psr-4": {
"Spatie\\Robots\\Tests\\": "tests"
}
},
"scripts": {
"test": "vendor/bin/phpunit",
"test-coverage": "phpunit --coverage-html coverage"
    },
    "config": {
        "sort-packages": true
    }
}