<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE INARY SYSTEM "https://raw.githubusercontent.com/Zaryob/inary/master/inary-spec.dtd">
<!-- INARY package specification for perl-WWW-Robotrules
     (CPAN dist WWW-RobotRules: parser for /robots.txt exclusion rules). -->
<INARY>
    <Source>
        <Name>perl-WWW-Robotrules</Name>
        <Homepage>http://search.cpan.org/~gaas/WWW-RobotRules-6.02/</Homepage>
        <Packager>
            <Name>Süleyman Poyraz</Name>
            <Email>zaryob.dev@gmail.com</Email>
        </Packager>
        <License>Artistic</License>
        <License>GPLv2</License>
        <PartOf>programming.language.perl</PartOf>
        <IsA>app:console</IsA>
        <Summary>database of robots.txt-derived permissions</Summary>
        <!-- Angle brackets around the URL must be escaped to keep the document well-formed. -->
        <Description>This module parses /robots.txt files as specified in "A Standard for Robot Exclusion", at &lt;http://www.robotstxt.org/wc/norobots.html&gt; Webmasters can use the /robots.txt file to forbid conforming robots from accessing parts of their web site.</Description>
        <Archive sha1sum="e158e6559307878b32d8e4c241bf257c2bc88ebb" type="targz">http://search.cpan.org/CPAN/authors/id/G/GA/GAAS/WWW-RobotRules-6.02.tar.gz</Archive>
        <BuildDependencies>
            <Dependency>perl-URI</Dependency>
        </BuildDependencies>
    </Source>
    <Package>
        <Name>perl-WWW-Robotrules</Name>
        <RuntimeDependencies>
            <Dependency>perl-URI</Dependency>
        </RuntimeDependencies>
        <Files>
            <Path fileType="library">/usr/lib</Path>
            <Path fileType="doc">/usr/share/doc</Path>
            <Path fileType="man">/usr/share/man</Path>
            <Path fileType="data">/usr/share</Path>
        </Files>
    </Package>
    <History>
        <Update release="2">
            <Date>2019-09-18</Date>
            <Version>6.02</Version>
            <Comment>Package is rebuilded because of perl upgrade and released up.</Comment>
            <Name>Süleyman Poyraz</Name>
            <Email>zaryob.dev@gmail.com</Email>
        </Update>
        <Update release="1">
            <Date>2019-09-16</Date>
            <Version>6.02</Version>
            <Comment>First release</Comment>
            <Name>Süleyman Poyraz</Name>
            <Email>zaryob.dev@gmail.com</Email>
        </Update>
    </History>
</INARY>