{ "name": "robots-parser", "version": "3.0.1", "description": "A specification compliant robots.txt parser with wildcard (*) matching support.", "keywords": [ "robots.txt", "parser", "user-agent", "scraper", "spider", "bot", "robots-exclusion-standard" ], "main": "index.js", "directories": { "test": "tests" }, "homepage": "https://github.com/samclarke/robots-parser", "bugs": { "url": "https://github.com/samclarke/robots-parser/issues" }, "scripts": { "test": "nyc --reporter=text-summary --reporter=html --reporter=lcovonly mocha" }, "repository": { "type": "git", "url": "https://github.com/samclarke/robots-parser.git" }, "author": "Sam Clarke", "license": "MIT", "files": [ "/Robots.js", "/index.js", "/index.d.ts", "/README.md", "/LICENSE.md", "/CHANGELOG.md", "/SECURITY.md", "test/*.js", ".gitignore" ], "engines": { "node": ">=10.0.0" }, "prettier": { "tabWidth": 4, "useTabs": true, "singleQuote": true, "trailingComma": "none" }, "devDependencies": { "chai": "^4.3.6", "mocha": "^10.0.0", "nyc": "^15.1.0" }, "types": "./index.d.ts" }