diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..8e7d1d4 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,46 @@ +# This workflow runs CI across supported Ruby versions on pushes to all branches and on pull requests to develop. + +name: CI + +# Controls when the action will run. on: + # Triggers the workflow on push events for all branches and on pull request events targeting the develop branch + push: + branches: [ '**' ] + pull_request: + branches: [ develop ] + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel jobs: + # This workflow contains a single job called "tests" + tests: + name: Ruby ${{ matrix.ruby }} + if: "contains(github.event.commits[0].message, '[ci skip]') == false" + runs-on: ubuntu-latest + env: + CI: true + strategy: + fail-fast: false + matrix: + ruby: + - 2.4 + - 2.5 + - 2.6 + - 2.7 + #- ruby-head # Commented out until net-http-persistent is updated + - jruby + steps: + - name: Clone repository + uses: actions/checkout@v2 + - name: Set up Ruby + uses: ruby/setup-ruby@v1 + with: + ruby-version: ${{ matrix.ruby }} + - name: Install dependencies + run: bundle install --jobs 4 --retry 3 + - name: Run tests + run: bundle exec rspec spec + diff --git a/.gitignore b/.gitignore index 762e32b..c9e8a74 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ .DS_Store +/coverage /doc /pkg /.yardoc diff --git a/.travis.yml b/.travis.yml index 83a126b..67fd26c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -7,10 +7,12 @@ rvm: - 2.5 - 2.6 - 2.7 + - ruby-head - jruby cache: bundler sudo: false matrix: allow_failures: + - rvm: ruby-head - rvm: jruby dist: trusty diff --git a/.yardopts b/.yardopts index c4ea1fd..4c61f9a 100644 --- a/.yardopts +++ b/.yardopts @@ -6,7 +6,6 @@ --markup markdown --readme README.md - -History.markdown -AUTHORS VERSION UNLICENSE +etc/n3.ebnf diff --git a/AUTHORS b/AUTHORS deleted file mode 100644 index 53fa739..0000000 --- a/AUTHORS +++ /dev/null @@ -1 +0,0 @@ -* Gregg Kellogg \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index cabeb41..6b3faa2 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -28,9 +28,11 @@ This repository uses [Git Flow](https://github.com/nvie/gitflow) to manage devel enough, be assured we will eventually add you in there. * Do note that in order for us to merge any non-trivial changes (as a rule of thumb, additions larger than about 15 lines of code), we need an - explicit [public domain dedication][PDD] on record from you. + explicit [public domain dedication][PDD] on record from you, + which you will be asked to agree to on the first commit to a repo within the organization. + Note that the agreement applies to all repos in the [Ruby RDF](https://github.com/ruby-rdf/) organization.
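The workflow above sets `CI: true` and runs `bundle exec rspec spec`, and the ignore list now excludes `/coverage`, which implies coverage reporting is bootstrapped from the spec helper (the simplecov and coveralls gems are added to the Gemfile later in this patch). A minimal sketch of such a setup, assuming the conventional SimpleCov/Coveralls wiring; the actual `spec/spec_helper.rb` is not part of this patch:

```ruby
# spec/spec_helper.rb -- illustrative sketch only; the real helper is not shown in this patch.
require 'simplecov'
require 'coveralls' if ENV['CI']

# Write the HTML report locally; add the Coveralls formatter only when running under CI.
formatters = [SimpleCov::Formatter::HTMLFormatter]
formatters << Coveralls::SimpleCov::Formatter if ENV['CI']
SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter.new(formatters)

SimpleCov.start do
  add_filter '/spec/' # keep the specs themselves out of the coverage report
end

require 'rdf/n3'
```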
[YARD]: https://yardoc.org/ [YARD-GS]: https://rubydoc.info/docs/yard/file/docs/GettingStarted.md -[PDD]: https://lists.w3.org/Archives/Public/public-rdf-ruby/2010May/0013.html +[PDD]: https://unlicense.org/#unlicensing-contributions [pr]: https://github.com/ruby-rdf/rdf-n3/compare/ diff --git a/Gemfile b/Gemfile index 714d784..d39376e 100644 --- a/Gemfile +++ b/Gemfile @@ -2,22 +2,28 @@ source "https://rubygems.org" gemspec -gem "rdf", git: "https://github.com/ruby-rdf/rdf", branch: "develop" +gem 'rdf', git: "https://github.com/ruby-rdf/rdf", branch: "develop" +gem 'sparql', git: "https://github.com/ruby-rdf/sparql", branch: "develop" +gem 'sxp', git: "https://github.com/dryruby/sxp.rb", branch: "develop" group :development do - gem "ebnf", git: "https://github.com/dryruby/ebnf", branch: "develop" - gem "rdf-aggregate-repo", git: "https://github.com/ruby-rdf/rdf-aggregate-repo", branch: "develop" - gem "rdf-spec", git: "https://github.com/ruby-rdf/rdf-spec", branch: "develop" - gem "rdf-isomorphic", git: "https://github.com/ruby-rdf/rdf-isomorphic", branch: "develop" - gem "rdf-trig", git: "https://github.com/ruby-rdf/rdf-trig", branch: "develop" - gem "rdf-turtle", git: "https://github.com/ruby-rdf/rdf-turtle", branch: "develop" + gem 'ebnf', git: "https://github.com/dryruby/ebnf", branch: "develop" + gem 'rdf-aggregate-repo', git: "https://github.com/ruby-rdf/rdf-aggregate-repo", branch: "develop" + gem 'rdf-spec', git: "https://github.com/ruby-rdf/rdf-spec", branch: "develop" + gem 'rdf-isomorphic', git: "https://github.com/ruby-rdf/rdf-isomorphic", branch: "develop" + gem 'rdf-trig', git: "https://github.com/ruby-rdf/rdf-trig", branch: "develop" + gem 'rdf-turtle', git: "https://github.com/ruby-rdf/rdf-turtle", branch: "develop" gem 'rdf-vocab', git: "https://github.com/ruby-rdf/rdf-vocab", branch: "develop" - gem "rdf-xsd", git: "https://github.com/ruby-rdf/rdf-xsd", branch: "develop" - gem "json-ld", git: "https://github.com/ruby-rdf/json-ld", branch: "develop" - gem 'sparql', git: "https://github.com/ruby-rdf/sparql", branch: "develop" - gem 'sxp', git: "https://github.com/dryruby/sxp.rb", branch: "develop" + gem 'rdf-xsd', git: "https://github.com/ruby-rdf/rdf-xsd", branch: "develop" + gem 'json-ld', git: "https://github.com/ruby-rdf/json-ld", branch: "develop" +end + +group :development, :test do + gem 'simplecov', '~> 0.16', platforms: :mri + gem 'coveralls', '~> 0.8', '>= 0.8.23', platforms: :mri end group :debug do - gem "byebug", platform: :mri + gem 'awesome_print', github: 'akshaymohite/awesome_print' + gem 'byebug', platform: :mri end diff --git a/History.markdown b/History.markdown deleted file mode 100644 index fdd5ab8..0000000 --- a/History.markdown +++ /dev/null @@ -1,99 +0,0 @@ -## 0.3.6 -* Update for RDF.rb 0.3.4 -* Added format detection. - -## 0.3.5 -* Use RDF::List for reading and writing lists. -* Performance improvements. -* Writer whitespace and property ordering improvements. -* Remove explicit Turtle support in Reader. - -## 0.3.4.1 -* In Reader, if no base\_uri is used, make sure that @prefix : <#> is generated, not @prefix : <>. -* In Writer, fix bug when trying to use `:standard\_prefixes` option. - -## 0.3.4 -* Reader accepts 1.0E1 in addition to 1.0e1 (case-insensitive match on exponent). -* Writer was not outputting xsd prefix if it was only used in a literal datatype. -* Use bare representations of xsd:integer, xsd:boolean, xsd:double, and xsd:decimal. -* Implement literal canonicalization (on option) in writer. 
- -## 0.3.3.1 -* Fixed bug in writer when given a base URI. - -## 0.3.3 -* Update dependencies to RDF.rb 0.3.3 -* Update specs to use open-uri-cached and Spira; no longer directly include W3C test cases. -* Use Bundler when running specs. -* Only output prefix definitions used in serialization. -* Fixed stack overflow in regular expression when matching long multi-line literals. -* Fixed bug (issue 14) where illegal QNames were generated in writer. - -## 0.3.2 -* Skipped - -## 0.3.1.3 -* Normalize language tags to lower case (only when canonicalizing). SPARQL specs expect the reader - to not screw with the language case for equivalence tests. - -## 0.3.1.2 -* Normalize language tags to lower case. - -## 0.3.1.1 -* Assert formats for :ttl, :turtle, and :notation3 in addition to :n3 - -## 0.3.1 -* Add application/turtle, application/x-turtle, text/rdf+n3 and application/rdf+n3 as mime types - matching this format, even though only text/turtle and text/n3 are valid. - -## 0.3.0 -* New Predictive-Parser based N3 Reader, substantially faster than previous Treetop-based parser -* RDF.rb 0.3.0 compatibility updates - * Remove literal_normalization and qname_hacks, add back uri_hacks (until 0.3.0) - * Use nil for default namespace - * In Writer - * Use only :prefixes for creating QNames. - * Add :standard_prefixes and :default_namespace options. - * Use """ for multi-line quotes, or anything including escaped characters - * In Reader - * URI canonicalization and validation. - * Added :canonicalize, and :intern options. - * Added #prefixes method returning a hash of prefix definitions. - * Change :strict option to :validate. - * Add check to ensure that predicates are not literals, it's not legal in any RDF variant. -* RSpec 2 compatibility - -## 0.2.3 -* In Writer, set @base_uri not @base, as :base_uri is an attribute. -* Relativize URLs without matching as regexp. -* Allow mixed case literal languages. -* Improve N3 Unicode support for Ruby 1.9 -* Improve Turtle/N3 Writer to use unescaped and qname'd values - -## 0.2.2 -* Ruby 1.9.2 compatibility -* Added script/tc to run test cases -* Fixed RDF.to_s != RDF.to_uri.to_s in writer, it worke for every other vocabulary -* Handle XMLLiteral when value is a Nokogiri node set. -* Simplify process_uri by not having a special case for ^# type URIs. -* Unescape values when creating URIs. -* URI normalization isn't required for N3, so removed. -* Added Reader#rewind and #close as stubs because document is parsed on initialize and input is closed. - -## 0.2.1 -* Compatible with RDF.rb 0.2.1 - -## 0.0.3 -* Replace require against rdf/rdfxml/patches/* with rdf/n3/patches/* - -## 0.0.2 -* N3 parsing and Turtle serialization substantially complete. - * A little more work needed on some tests and some lingering issues in RDF.rb to be resolved. -* Added script/console and script/parse -* Updates to reader to bring it in line with other readers. Implement uri() and ns() as helper functions for constructing URIs. -* Literal_normalization to override RDF::Literal.initialize and create Literal#valid? -* rdf_escape Literals when serializing via to_s -* Remove trailing "#" from URIs when normalizing. - -## 0.0.1 -* First port from RdfContext version 0.5.4 diff --git a/README.md b/README.md old mode 100755 new mode 100644 index dde8f4d..a56a506 --- a/README.md +++ b/README.md @@ -2,19 +2,28 @@ Notation-3 reader/writer for [RDF.rb][RDF.rb] . 
[![Gem Version](https://badge.fury.io/rb/rdf-n3.png)](https://badge.fury.io/rb/rdf-n3) -[![Build Status](https://travis-ci.org/ruby-rdf/rdf-n3.png?branch=master)](https://travis-ci.org/ruby-rdf/rdf-n3) +[![Build Status](https://github.com/ruby-rdf/rdf-n3/workflows/CI/badge.svg?branch=develop)](https://github.com/ruby-rdf/rdf-n3/actions?query=workflow%3ACI) +[![Coverage Status](https://coveralls.io/repos/ruby-rdf/rdf-n3/badge.svg)](https://coveralls.io/github/ruby-rdf/rdf-n3) +[![Gitter chat](https://badges.gitter.im/ruby-rdf/rdf.png)](https://gitter.im/ruby-rdf/rdf) ## Description -RDF::N3 is an Notation-3 parser for Ruby using the [RDF.rb][RDF.rb] library suite. Also implements N3 Entailment. +RDF::N3 is an Notation-3 parser and reasoner for Ruby using the [RDF.rb][RDF.rb] library suite. Reader inspired from TimBL predictiveParser and Python librdf implementation. -## Turtle deprecated -Support for Turtle mime-types and specific format support has been deprecated from this gem, -as Turtle is now implemented using [RDF::Turtle][RDF::Turtle]. +## Uses CG Specification +This version tracks the [W3C N3 Community Group][] [Specification][N3] which has incompatibilities with the [Team Submission][] version. Notably: + +* The `@keywords` declaration is removed, and most form of `@` keywords (e.g., `@is`, `@has`, `@true`) are no longer supported. +* Terminals adhere closely to their counterparts in [Turtle][]. +* The modifier `<-` is introduced as a synonym for `is ... of`. +* The SPARQL `BASE` and `PREFIX` declarations are supported. +* Implicit universal variables are defined at the top-level, rather than in the parent formula of the one in which they are defined. + +This brings N3 closer to compatibility with Turtle. ## Features -RDF::N3 parses [Notation-3][N3], [Turtle][Turtle] and [N-Triples][N-Triples] into statements or quads. It also performs reasoning and serializes to N3. +RDF::N3 parses [Notation-3][N3], [Turtle][] and [N-Triples][] into statements or quads. It also performs reasoning and serializes to N3. Install with `gem install rdf-n3` @@ -49,24 +58,104 @@ Experimental N3 reasoning is supported. Instantiate a reasoner from a dataset: end end -Reasoning is performed by turning a repository containing formula and predicate operators into an executable set of operators (similar to the executable SPARQL Algebra). Reasoning adds statements to the base dataset, marked with `:inferred` (e.g. `statement.inferred?`). Predicate operators are defined from the following vocabularies: - -* RDF List vocabulary - * list:append (not implemented yet - See {RDF::N3::Algebra::ListAppend}) - * list:in (not implemented yet - See {RDF::N3::Algebra::ListIn}) - * list:last (not implemented yet - See {RDF::N3::Algebra::ListLast}) - * list:member (not implemented yet - See {RDF::N3::Algebra::ListMember}) -* RDF Log vocabulary - * log:conclusion (not implemented yet - See {RDF::N3::Algebra::LogConclusion}) - * log:conjunction (not implemented yet - See {RDF::N3::Algebra::LogConjunction}) - * log:equalTo (See {not implemented yet - RDF::N3::Algebra::LogEqualTo}) - * log:implies (See {RDF::N3::Algebra::LogImplies}) - * log:includes (not implemented yet - See {RDF::N3::Algebra::LogIncludes}) - * log:notEqualTo (not implemented yet - See {RDF::N3::Algebra::LogNotEqualTo}) - * log:notIncludes (not implemented yet - See {RDF::N3::Algebra::LogNotIncludes}) - * log:outputString (not implemented yet - See {RDF::N3::Algebra::LogOutputString}) - -N3 Formulae are introduced with the { statement-list } syntax. 
A given formula is assigned an RDF::Node instance, which is also used as the graph_name for RDF::Statement instances provided to RDF::N3::Reader#each_statement. For example, the following N3 generates the associated statements: +Reasoning is performed by turning a repository containing formula and predicate operators into an executable set of operators (similar to the executable SPARQL Algebra). Reasoning adds statements to the base dataset, marked with `:inferred` (e.g. `statement.inferred?`). Predicate operators are defined from the following vocabularies. + +When dispatching built-in operators, precedence is given to operators whos operands are fully evaluated, followed by those having only variable output operands, followed by those having the fewest operands. Operators are evaluated until either no solutions are derived, or all operators have been completed. + +Reasoning is discussed in the [Design Issues][] document. + +#### RDF List vocabulary + + * `list:append` (See {RDF::N3::Algebra::List::Append}) + * `list:first` (See {RDF::N3::Algebra::List::First}) + * `list:in` (See {RDF::N3::Algebra::List::In}) + * `list:last` (See {RDF::N3::Algebra::List::Last}) + * `list:length` (See {RDF::N3::Algebra::List::Length}) + * `list:member` (See {RDF::N3::Algebra::List::Member}) + +#### RDF Log vocabulary + + * `log:conclusion` (See {RDF::N3::Algebra::Log::Conclusion}) + * `log:conjunction` (See {RDF::N3::Algebra::Log::Conjunction}) + * `log:content` (See {RDF::N3::Algebra::Log::Content}) + * `log:equalTo` (See {RDF::N3::Algebra::Log::EqualTo}) + * `log:implies` (See {RDF::N3::Algebra::Log::Implies}) + * `log:includes` (See {RDF::N3::Algebra::Log::Includes}) + * `log:n3String` (See {RDF::N3::Algebra::Log::N3String}) + * `log:notEqualTo` (See {RDF::N3::Algebra::Log::NotEqualTo}) + * `log:notIncludes` (See {RDF::N3::Algebra::Log::NotIncludes}) + * `log:outputString` See {RDF::N3::Algebra::Log::OutputString}) + * `log:parsedAsN3` (See {RDF::N3::Algebra::Log::ParsedAsN3}) + * `log:semantics` (See {RDF::N3::Algebra::Log::Semantics}) + +#### RDF Math vocabulary + + * `math:absoluteValue` (See {RDF::N3::Algebra::Math::AbsoluteValue}) + * `math:acos` (See {RDF::N3::Algebra::Math::ACos}) + * `math:asin` (See {RDF::N3::Algebra::Math::ASin}) + * `math:atan` (See {RDF::N3::Algebra::Math::ATan}) + * `math:acosh` (See {RDF::N3::Algebra::Math::ACosH}) + * `math:asinh` (See {RDF::N3::Algebra::Math::ASinH}) + * `math:atanh` (See {RDF::N3::Algebra::Math::ATanH}) + * `math:ceiling` (See {RDF::N3::Algebra::Math::Ceiling}) + * `math:cosh` (See {RDF::N3::Algebra::Math::CosH}) + * `math:cos` (See {RDF::N3::Algebra::Math::Cos}) + * `math:difference` (See {RDF::N3::Algebra::Math::Difference}) + * `math:equalTo` (See {RDF::N3::Algebra::Math::EqualTo}) + * `math:exponentiation` (See {RDF::N3::Algebra::Math::Exponentiation}) + * `math:floor` (See {RDF::N3::Algebra::Math::Floor}) + * `math:greaterThan` (See {RDF::N3::Algebra::Math::GreaterThan}) + * `math:lessThan` (See {RDF::N3::Algebra::Math::LessThan}) + * `math:negation` (See {RDF::N3::Algebra::Math::Negation}) + * `math:notEqualTo` (See {RDF::N3::Algebra::Math::NotEqualTo}) + * `math:notGreaterThan` (See {RDF::N3::Algebra::Math::NotGreaterThan}) + * `math:notLessThan` (See {RDF::N3::Algebra::Math::NotLessThan}) + * `math:product` (See {RDF::N3::Algebra::Math::Product}) + * `math:quotient` (See {RDF::N3::Algebra::Math::Quotient}) + * `math:remainder` (See {RDF::N3::Algebra::Math::Remainder}) + * `math:rounded` (See {RDF::N3::Algebra::Math::Rounded}) + * `math:sinh` 
(See {RDF::N3::Algebra::Math::SinH}) + * `math:sin` (See {RDF::N3::Algebra::Math::Sin}) + * `math:sum` (See {RDF::N3::Algebra::Math::Sum}) + * `math:tanh` (See {RDF::N3::Algebra::Math::TanH}) + * `math:tan` (See {RDF::N3::Algebra::Math::Tan}) + +#### RDF String vocabulary + + * `string:concatenation` (See {RDF::N3::Algebra::Str::Concatenation}) + * `string:contains` (See {RDF::N3::Algebra::Str::Contains}) + * `string:containsIgnoringCase` (See {RDF::N3::Algebra::Str::ContainsIgnoringCase}) + * `string:endsWith` (See {RDF::N3::Algebra::Str::EndsWith}) + * `string:equalIgnoringCase` (See {RDF::N3::Algebra::Str::EqualIgnoringCase}) + * `string:format` (See {RDF::N3::Algebra::Str::Format}) + * `string:greaterThan` (See {RDF::N3::Algebra::Str::GreaterThan}) + * `string:lessThan` (See {RDF::N3::Algebra::Str::LessThan}) + * `string:matches` (See {RDF::N3::Algebra::Str::Matches}) + * `string:notEqualIgnoringCase` (See {RDF::N3::Algebra::Str::NotEqualIgnoringCase}) + * `string:notGreaterThan` (See {RDF::N3::Algebra::Str::NotGreaterThan}) + * `string:notLessThan` (See {RDF::N3::Algebra::Str::NotLessThan}) + * `string:notMatches` (See {RDF::N3::Algebra::Str::NotMatches}) + * `string:replace` (See {RDF::N3::Algebra::Str::Replace}) + * `string:scrape` (See {RDF::N3::Algebra::Str::Scrape}) + * `string:startsWith` (See {RDF::N3::Algebra::Str::StartsWith}) + +#### RDF Time vocabulary <> + + * `time:dayOfWeek` (See {RDF::N3::Algebra::Time::DayOfWeek}) + * `time:day` (See {RDF::N3::Algebra::Time::Day}) + * `time:gmTime` (See {RDF::N3::Algebra::Time::GmTime}) + * `time:hour` (See {RDF::N3::Algebra::Time::Hour}) + * `time:inSeconds` (See {RDF::N3::Algebra::Time::InSeconds}) + * `time:localTime` (See {RDF::N3::Algebra::Time::LocalTime}) + * `time:minute` (See {RDF::N3::Algebra::Time::Minute}) + * `time:month` (See {RDF::N3::Algebra::Time::Month}) + * `time:second` (See {RDF::N3::Algebra::Time::Second}) + * `time:timeZone` (See {RDF::N3::Algebra::Time::Timezone}) + * `time:year` (See {RDF::N3::Algebra::Time::Year}) + +### Formulae / Quoted Graphs + +N3 Formulae are introduced with the `{ statement-list }` syntax. A given formula is assigned an `RDF::Node` instance, which is also used as the graph_name for `RDF::Statement` instances provided to `RDF::N3::Reader#each_statement`. For example, the following N3 generates the associated statements: @prefix x: . @prefix log: . @@ -81,10 +170,10 @@ when turned into an RDF Repository results in the following quads _:ora _:moby _:form . _:ora "Ora" _:form . -Reasoning uses a Notation3 Algebra, similar to [SPARQL S-Expressions](). This implementation considers formulae to be patterns, which may be asserted on statements made in the default graph, possibly loaded from a separate file. The logical relationships are reduced to algebraic operators. +Reasoning uses a Notation3 Algebra, similar to [SPARQL S-Expressions][]. This implementation considers formulae to be patterns, which may be asserted on statements made in the default graph, possibly loaded from a separate file. The logical relationships are reduced to algebraic operators. ### Variables -N3 Variables are introduced with @forAll, @forSome, or ?x. Variables reference URIs described in formulae, typically defined in the default vocabulary (e.g., ":x"). Existential variables are replaced with an allocated RDF::Node instance. Universal variables are replaced with a RDF::Query::Variable instance. 
For example, the following N3 generates the associated statements: +N3 Variables are introduced with `@forAll`, `@forSome`, or `?x`. Variables reference URIs described in formulae, typically defined in the default vocabulary (e.g., `":x"`). Existential variables are replaced with an allocated `RDF::Node` instance. Universal variables are replaced with a `RDF::Query::Variable` instance. For example, the following N3 generates the associated statements: @forAll <#h>. @forSome <#g>. <#g> <#loves> <#h> . @@ -96,21 +185,20 @@ results in: Note that the behavior of both existential and universal variables is not entirely in keeping with the [Team Submission][], and neither work quite like SPARQL variables. When used in the antecedent part of an implication, universal variables should behave much like SPARQL variables. This area is subject to a fair amount of change. -## Implementation Notes -The parser is driven through a rules table contained in lib/rdf/n3/reader/meta.rb. This includes -branch rules to indicate productions to be taken based on a current production. Terminals are denoted -through a set of regular expressions used to match each type of terminal. +* Variables themselves cannot be part of a solution, which limits the ability to generate updated rules for reasoning. +* Both Existentials and Universals are treated as simple variables, and there is really no preference given based on the order in which they are introduced. -The [meta.rb][file:lib/rdf/n3/reader/meta.rb] file is generated from lib/rdf/n3/reader/n3-selectors.n3 -(taken from http://www.w3.org/2000/10/swap/grammar/n3-selectors.n3) which is the result of parsing -http://www.w3.org/2000/10/swap/grammar/n3.n3 (along with bnf-rules.n3) using cwm using the following command sequence: +### Query +Formulae are typically used to query the knowledge base, which is set from the base-formula/default graph. A formula is composed of both constant statements and variable statements. When running as a query, such as for the antecedent formula in `log:implies`, statements including either explicit variables or blank nodes are treated as query patterns and are used to query the knowledge base to create a solution set, which is used either to prove the formula correct or to create bindings passed to the consequent formula. - cwm n3.n3 bnf-rules.n3 --think --purge --data > n3-selectors.n3 - -[n3-selectors.n3][file:lib/rdf/n3/reader/n3-selectors.rb] is itself used to generate meta.rb using script/build_meta. +Blank nodes associated with rdf:List statements used as part of a built-in are made _non-distinguished_ existential variables, and patterns containing these variables become optional. If they are not bound as part of the query, they are implicitly bound as the original blank nodes defined within the formula, which allows for constant list arguments, list arguments that contain variables, or arguments that are variables expanding to lists.
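A short Ruby sketch of the formula behavior described above: statements nested inside `{ ... }` are reported with the formula's `RDF::Node` as their `graph_name`, while the top-level implication statement has none. This assumes only the standard RDF.rb reader interface; the prefix and triples are illustrative.

```ruby
require 'rdf/n3'

input = <<~N3
  @prefix : <http://example.org/> .
  { :sky :color :blue } => { :sky :is :colorful } .
N3

RDF::N3::Reader.new(input) do |reader|
  reader.each_statement do |statement|
    # Statements inside either formula carry that formula's RDF::Node as
    # their graph_name; the top-level log:implies statement has none.
    puts "#{statement.graph_name.inspect}: #{statement.to_triple.inspect}"
  end
end
```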
## Dependencies -* [RDF.rb](https://rubygems.org/gems/rdf) (~> 3.0, >= 3.0.10) +* [Ruby](https://ruby-lang.org/) (>= 2.4) +* [RDF.rb](https://rubygems.org/gems/rdf) (~> 3.1, >= 3.1.4) +* [EBNF][EBNF gem] (~> 2.1) +* [SPARQL][SPARQL gem] (~> 3.1) +* [SXP][SXP gem] (~> 1.1) ## Documentation Full documentation available on [RubyDoc.info](https://rubydoc.info/github/ruby-rdf/rdf-n3) @@ -123,26 +211,11 @@ Full documentation available on [RubyDoc.info](https://rubydoc.info/github/ruby- * {RDF::N3::Writer} * {RDF::N3::Algebra} * {RDF::N3::Algebra::Formula} - * {RDF::N3::Algebra::ListAppend} - * {RDF::N3::Algebra::ListIn} - * {RDF::N3::Algebra::ListLast} - * {RDF::N3::Algebra::ListMember} - * {RDF::N3::Algebra::LogConclusion} - * {RDF::N3::Algebra::LogConjunction} - * {RDF::N3::Algebra::LogEqualTo} - * {RDF::N3::Algebra::LogImplies} - * {RDF::N3::Algebra::LogIncludes} - * {RDF::N3::Algebra::LogNotEqualTo} - * {RDF::N3::Algebra::LogNotIncludes} - * {RDF::N3::Algebra::LogOutputString} ### Additional vocabularies -* {RDF::N3::Log} * {RDF::N3::Rei} * {RDF::N3::Crypto} -* {RDF::N3::List} * {RDF::N3::Math} -* {RDF::N3::Str} * {RDF::N3::Time} ## Resources @@ -151,19 +224,17 @@ Full documentation available on [RubyDoc.info](https://rubydoc.info/github/ruby- * [Documentation](https://rubydoc.info/github/ruby-rdf/rdf-n3/) * [History](file:file.History.html) * [Notation-3][N3] +* [Team Submission][] * [N3 Primer](https://www.w3.org/2000/10/swap/Primer.html) * [N3 Reification](https://www.w3.org/DesignIssues/Reify.html) -* [Turtle][Turtle] -* [W3C SWAP Test suite](https://www.w3.org/2000/10/swap/test/README.html) -* [W3C Turtle Test suite](https://www.w3.org/2001/sw/DataAccess/df1/tests/README.txt) -* [N-Triples][N-Triples] +* [Turtle][] +* [W3C SWAP Test suite](https://w3c.github.io/N3/tests/) +* [W3C Turtle Test suite](https://w3c.github.io/rdf-tests/turtle/) +* [N-Triples][] ## Author * [Gregg Kellogg](https://github.com/gkellogg) - -## Contributors -* [Nicholas Humfrey](https://github.com/njh) - - ## Contributing This repository uses [Git Flow](https://github.com/nvie/gitflow) to mange development and release activity. All submissions _must_ be on a feature branch based on the _develop_ branch to ease staging and integration. @@ -177,7 +248,9 @@ This repository uses [Git Flow](https://github.com/nvie/gitflow) to mange develo list in the the `README`. Alphabetical order applies. * Do note that in order for us to merge any non-trivial changes (as a rule of thumb, additions larger than about 15 lines of code), we need an - explicit [public domain dedication][PDD] on record from you. + explicit [public domain dedication][PDD] on record from you, + which you will be asked to agree to on the first commit to a repo within the organization. + Note that the agreement applies to all repos in the [Ruby RDF](https://github.com/ruby-rdf/) organization. ## License @@ -190,13 +263,19 @@ see or the accompanying {file:UNLICENSE} file. 
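Among the principal classes listed above is {RDF::N3::Writer}; serialization back to N3 follows the standard RDF.rb writer interface. A minimal sketch, in which the input file and prefix choice are illustrative:

```ruby
require 'rdf/n3'

# Load the project DOAP description and re-serialize it as N3 with a prefix.
graph = RDF::Graph.load("etc/doap.n3", format: :n3)

n3 = RDF::N3::Writer.buffer(prefixes: { doap: "http://usefulinc.com/ns/doap#" }) do |writer|
  writer << graph
end
puts n3
```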
* * -[RDF.rb]: https://ruby-rdf.github.com/rdf -[RDF::Turtle]: https://ruby-rdf.github.com/rdf-turtle/ -[N3]: https://www.w3.org/DesignIssues/Notation3.html "Notation-3" -[Team Submission]: https://www.w3.org/TeamSubmission/n3/ -[Turtle]: https://www.w3.org/TR/turtle/ -[N-Triples]: https://www.w3.org/TR/n-triples/ -[YARD]: https://yardoc.org/ -[YARD-GS]: https://rubydoc.info/docs/yard/file/docs/GettingStarted.md -[PDD]: https://lists.w3.org/Archives/Public/public-rdf-ruby/2010May/0013.html +[RDF::Turtle]: https://ruby-rdf.github.com/rdf-turtle/ +[Design Issues]: https://www.w3.org/DesignIssues/Notation3.html "Notation-3 Design Issues" +[Team Submission]: https://www.w3.org/TeamSubmission/n3/ +[Turtle]: https://www.w3.org/TR/turtle/ +[N-Triples]: https://www.w3.org/TR/n-triples/ +[YARD]: https://yardoc.org/ +[YARD-GS]: https://rubydoc.info/docs/yard/file/docs/GettingStarted.md +[PDD]: https://unlicense.org/#unlicensing-contributions [SPARQL S-Expressions]: https://jena.apache.org/documentation/notes/sse.html +[W3C N3 Community Group]: https://www.w3.org/community/n3-dev/ +[N3]: https://w3c.github.io/N3/spec/ +[PEG]: https://en.wikipedia.org/wiki/Parsing_expression_grammar +[RDF.rb]: https://ruby-rdf.github.com/rdf +[EBNF gem]: https://ruby-rdf.github.com/ebnf +[SPARQL gem]: https://ruby-rdf.github.com/sparql +[SXP gem]: https://ruby-rdf.github.com/sxp diff --git a/Rakefile b/Rakefile index 86cf690..22d0356 100644 --- a/Rakefile +++ b/Rakefile @@ -1,6 +1,4 @@ require 'rubygems' -require 'yard' -require 'rspec/core/rake_task' namespace :gem do desc "Build the rdf-n3-#{File.read('VERSION').chomp}.gem file" @@ -14,22 +12,17 @@ namespace :gem do end end -RSpec::Core::RakeTask.new(:spec) +namespace :etc do + ETC_FILES = %w{etc/n3.sxp} + desc 'Remove generated files in etc' + task :clean do + %x(rm #{ETC_FILES.join(' ')}) + end -desc "Run specs through RCov" -RSpec::Core::RakeTask.new("spec:rcov") do |spec| - spec.rcov = true - spec.rcov_opts = %q[--exclude "spec"] + desc 'Create versions of ebnf files in etc' + task build: ETC_FILES end -namespace :doc do - YARD::Rake::YardocTask.new - - desc "Generate HTML report specs" - RSpec::Core::RakeTask.new("spec") do |spec| - spec.rspec_opts = ["--format", "html", "-o", "doc/spec.html"] - end +file "etc/n3.sxp" => "etc/n3.ebnf" do |t| + %x{ebnf -o #{t.name} #{t.source}} end - -task specs: :spec -task default: :spec diff --git a/UNLICENSE b/UNLICENSE index 68a49da..efb9808 100644 --- a/UNLICENSE +++ b/UNLICENSE @@ -21,4 +21,4 @@ OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -For more information, please refer to +For more information, please refer to diff --git a/VERSION b/VERSION index 94ff29c..ef538c2 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -3.1.1 +3.1.2 diff --git a/etc/abstract_syntax.md b/etc/abstract_syntax.md new file mode 100644 index 0000000..a7126df --- /dev/null +++ b/etc/abstract_syntax.md @@ -0,0 +1,26 @@ +# Notation-3 Abstract Syntax + +The [Notation-3][] Abstract Syntax generalizes the the [RDF Abstract Syntax](https://www.w3.org/TR/rdf11-concepts/) defined in [[RDF11-Concepts][]] further generalizing the concepts of [generalized RDF triple](https://www.w3.org/TR/rdf11-concepts/#dfn-generalized-rdf-triple) and [generalized RDF graph](https://www.w3.org/TR/rdf11-concepts/#dfn-generalized-rdf-graph). 
+ +An [N3 triple](https://w3c.github.io/N3/spec/#N3-triple) is composed of three [N3 triple elements](https://w3c.github.io/N3/spec/#dfn-triple-elements), where each element can be an [IRI](https://www.w3.org/TR/rdf11-concepts/#dfn-iri), [blank node](https://www.w3.org/TR/rdf11-concepts/#dfn-blank-node), [literal](https://www.w3.org/TR/rdf11-concepts/#dfn-literal), [list](https://w3c.github.io/N3/spec/#lists), [N3 graph](#n3-graph), or universal variable. + +An N3 graph, abstracting a [generalized RDF graph](https://www.w3.org/TR/rdf11-concepts/#dfn-generalized-rdf-graph), is then a set of zero or more [N3 triples](https://w3c.github.io/N3/spec/#N3-triple), also having zero or more bound [universal variables](https://w3c.github.io/N3/spec/#dfn-universals). + +When appearing as the subject, predicate, or object of an [N3 triple](https://w3c.github.io/N3/spec/#N3-triple), an [N3 graph](#n3-graph) may also be quantified, unless given a separate interpretation by the semantics defined for the associated predicate (e.g., when it is a builtin). + +Note that in Notation-3, a [list](https://w3c.github.io/N3/spec/#lists) is a first-class resource, which may be quantified when appearing with a [quantified N3 formula](https://w3c.github.io/N3/spec/#quantified-formula). The notion of [RDF Collection](https://www.w3.org/TR/rdf11-mt/#rdf-collections) from [[RDF 1.1 Semantics](https://www.w3.org/TR/rdf11-mt/)] may be considered a reification of an [N3 list](https://w3c.github.io/N3/spec/#lists). + +## Relationship to Datasets + +The description of the Abstract Syntax is based on the notion of resources, triples and graphs, where a graph may be a triple component, thus creating a recursive graph. This is similar to the notion of an [RDF dataset](https://www.w3.org/TR/rdf11-concepts/#dfn-rdf-dataset) where a blank node becomes a stand-in for the graph when used within a triple, and that same blank node names a named graph containing the triples from the referenced graph. The fact that both blank nodes and N3 graphs are existentially quantified leads to similar semantics, although in RDF, datasets have no defined semantics. + +## Notes + +[Blank nodes](https://www.w3.org/TR/rdf11-concepts/#dfn-blank-node) in Notation-3 are unique across [N3 graphs](#n3-graph), unlike in other RDF syntaxes; however, this is considered a concrete syntax-level concern and does not affect the abstract syntax. + +Similarly, both [Blank Nodes](https://www.w3.org/TR/rdf11-concepts/#dfn-blank-node) and [universal variables](https://w3c.github.io/N3/spec/#dfn-universals) act as quantifiers and are scoped to a particular [N3 graph](#n3-graph). This can also be considered a concrete syntax-level concern which can be addressed by appropriately renaming variables at the global scope. + +An [N3 graph](#n3-graph) is often referred to as a [formula](https://w3c.github.io/N3/spec/#N3-formula) (plural, formulae); however, the concept of formula in N3 also includes [N3 triples](https://w3c.github.io/N3/spec/#N3-triple). + +[Notation-3]: https://w3c.github.io/N3/spec/ +[RDF11-Concepts]: https://www.w3.org/TR/rdf11-concepts/ \ No newline at end of file diff --git a/etc/doap.n3 b/etc/doap.n3 index 84226c7..c646b53 100644 --- a/etc/doap.n3 +++ b/etc/doap.n3 @@ -1,4 +1,4 @@ -@base . +@base . @prefix rdf: . @prefix rdfs: . @prefix dc: .
@@ -10,7 +10,7 @@ <> a doap:Project ; doap:name "RDF::N3" ; doap:homepage ; - doap:license ; + doap:license ; doap:shortdesc "N3 reader/writer for Ruby."@en ; doap:description "RDF::N3 is an Notation-3 reader/writer and reasoner for the RDF.rb library suite."@en ; doap:created "2010-06-03"^^xsd:date; @@ -18,13 +18,13 @@ doap:implements ; doap:category , ; - doap:download-page ; - doap:mailing-list ; + doap:download-page <> ; + doap:mailing-list ; doap:bug-database ; - doap:blog ; - doap:developer ; + doap:blog ; + doap:developer ; doap:helper ; - doap:maintainer ; - doap:documenter ; - foaf:maker ; - dc:creator . + doap:maintainer ; + doap:documenter ; + foaf:maker ; + dc:creator . diff --git a/etc/doap.nt b/etc/doap.nt index 0449e43..cb33a3a 100644 --- a/etc/doap.nt +++ b/etc/doap.nt @@ -1,21 +1,21 @@ - . - "RDF::N3" . - . - . - "N3 reader/writer reader/writer and reasoner for Ruby."@en . - "RDF::N3 is an Notation-3 reader/writer for the RDF.rb library suite."@en . - "2010-06-03"^^ . - "Ruby" . - . - . - . - . - . - . - . - . - . - . - . - . - . + . + . + . + "Ruby" . + "N3 reader/writer for Ruby."@en . + . + . + . + . + . + "RDF::N3 is an Notation-3 reader/writer and reasoner for the RDF.rb library suite."@en . + . + . + . + "RDF::N3" . + . + . + "2010-06-03"^^ . + . + . + . diff --git a/etc/doap.ttl b/etc/doap.ttl new file mode 120000 index 0000000..d64db8e --- /dev/null +++ b/etc/doap.ttl @@ -0,0 +1 @@ +doap.n3 \ No newline at end of file diff --git a/etc/n3.ebnf b/etc/n3.ebnf new file mode 100644 index 0000000..7e48f2e --- /dev/null +++ b/etc/n3.ebnf @@ -0,0 +1,141 @@ + # EBNF Notation3 Grammar based pm Antlr4. + # From https://github.com/w3c/N3/blob/master/grammar/n3.g4 + + [1] n3Doc ::= (n3Statement '.' | sparqlDirective)* + + [2] n3Statement ::= n3Directive | triples | existential | universal + + [3] n3Directive ::= prefixID | base + + [4] sparqlDirective ::= sparqlBase | sparqlPrefix + + [5] sparqlBase ::= BASE IRIREF + + [6] sparqlPrefix ::= PREFIX PNAME_NS IRIREF + + [7] prefixID ::= '@prefix' PNAME_NS IRIREF + + [8] base ::= '@base' IRIREF + + [9] triples ::= subject predicateObjectList? + + [10] predicateObjectList ::= verb objectList (';' (verb objectList)?)* + + [11] objectList ::= object (',' object)* + + [12] verb ::= predicate + | 'a' + | 'has' expression + | 'is' expression 'of' + | '<-' expression # Synonym for is expression of + | '<=' + | '=>' + | '=' + + [13] subject ::= expression + + [14] predicate ::= expression + + [15] object ::= expression + + [16] expression ::= path + + [17] path ::= pathItem ('!' path | '^' path)? + + [18] pathItem ::= iri + | blankNode + | quickVar + | collection + | blankNodePropertyList + | literal + | formula + + [19] literal ::= rdfLiteral + | numericLiteral + | BOOLEAN_LITERAL + + [20] blankNodePropertyList ::= '[' predicateObjectList ']' + + [21] collection ::= '(' object* ')' + + [22] formula ::= '{' formulaContent? '}' + + [23] formulaContent ::= n3Statement ('.' formulaContent?)? + | sparqlDirective formulaContent? + + [24] numericLiteral ::= DOUBLE | DECIMAL | INTEGER + + [25] rdfLiteral ::= STRING (LANGTAG | '^^' iri)? 
+ + [26] iri ::= IRIREF | prefixedName + + [27] iriList ::= iri ( ',' iri )* + + [28] prefixedName ::= PNAME_LN | PNAME_NS + # PNAME_NS will be matched for ':' (i.e., "empty") prefixedNames + # hence this cannot be a lexer rule; for s/p/o of only ':', PNAME_NS will be returned + # instead of PrefixedName token + + [29] blankNode ::= BLANK_NODE_LABEL | ANON + + [30] quickVar ::= QUICK_VAR_NAME + # only made this a parser rule for consistency + # (all other path-items are also parser rules) + + [31] existential ::= '@forSome' iriList + + [32] universal ::= '@forAll' iriList + + @terminals + + [33] BOOLEAN_LITERAL ::= 'true' | 'false' + + [34] STRING ::= STRING_LITERAL_LONG_SINGLE_QUOTE + | STRING_LITERAL_LONG_QUOTE + | STRING_LITERAL_QUOTE + | STRING_LITERAL_SINGLE_QUOTE + + /* borrowed from SPARQL spec, which excludes newlines and other nastiness */ + [139s] IRIREF ::= '<' ([^<>"{}|^`\]-[#x00-#x20] | UCHAR | WS)* '>' + [140s] PNAME_NS ::= PN_PREFIX? ':' + [141s] PNAME_LN ::= PNAME_NS PN_LOCAL + [142s] BLANK_NODE_LABEL ::= '_:' ( PN_CHARS_U | [0-9] ) ((PN_CHARS|'.')* PN_CHARS)? + [145s] LANGTAG ::= "@" ([a-zA-Z]+ ( "-" [a-zA-Z0-9]+ )*) - ("is" | "has") + [146s] INTEGER ::= [0-9]+ + [147s] DECIMAL ::= [0-9]* '.' [0-9]+ + [148s] DOUBLE ::= [0-9]+ '.' [0-9]* EXPONENT + | '.' ([0-9])+ EXPONENT | ([0-9])+ EXPONENT + [155s] EXPONENT ::= [eE] [+-]? [0-9]+ + [156s] STRING_LITERAL_QUOTE ::= '"' ( [^#x22#x5C#xA#xD] | ECHAR | UCHAR )* '"' + [157s] STRING_LITERAL_SINGLE_QUOTE ::= "'" ( [^#x27#x5C#xA#xD] | ECHAR | UCHAR )* "'" + [158s] STRING_LITERAL_LONG_SINGLE_QUOTE ::= "'''" ( ( "'" | "''" )? ( [^'\] | ECHAR | UCHAR ) )* "'''" + [159s] STRING_LITERAL_LONG_QUOTE ::= '"""' ( ( '"' | '""' )? ( [^"\] | ECHAR | UCHAR ) )* '"""' + [35] UCHAR ::= ( "\u" HEX HEX HEX HEX ) | ( "\U" HEX HEX HEX HEX HEX HEX HEX HEX ) + [160s] ECHAR ::= "\" [tbnrf\"'] + [162s] WS ::= #x20 | #x9 | #xD | #xA + [163s] ANON ::= '[' WS* ']' + [36] QUICK_VAR_NAME ::= "?" PN_LOCAL + /* Allows fuller character set */ + [164s] PN_CHARS_BASE ::= [A-Z] | [a-z] | [#x00C0-#x00D6] + | [#x00D8-#x00F6] | [#x00F8-#x02FF] | [#x0370-#x037D] + | [#x037F-#x1FFF] | [#x200C-#x200D] | [#x2070-#x218F] + | [#x2C00-#x2FEF] | [#x3001-#xD7FF] | [#xF900-#xFDCF] + | [#xFDF0-#xFFFD] | [#x10000-#xEFFFF] + [165s] PN_CHARS_U ::= PN_CHARS_BASE | '_' + [167s] PN_CHARS ::= PN_CHARS_U | "-" | [0-9] | #x00B7 | [#x0300-#x036F] | [#x203F-#x2040] + /* BASE and PREFIX must be case-insensitive, hence these monstrosities */ + [37] BASE ::= ('B'|'b') ('A'|'a') ('S'|'s') ('E'|'e') + [38] PREFIX ::= ('P'|'p') ('R'|'r') ('E'|'e') ('F'|'f') ('I'|'i') ('X'|'x') + [168s] PN_PREFIX ::= PN_CHARS_BASE ( ( PN_CHARS | "." )* PN_CHARS )? + [169s] PN_LOCAL ::= ( PN_CHARS_U | ':' | [0-9] | PLX ) ( ( PN_CHARS | '.' | ':' | PLX )* ( PN_CHARS | ':' | PLX ) ) ? + [170s] PLX ::= PERCENT | PN_LOCAL_ESC + [171s] PERCENT ::= '%' HEX HEX + [172s] HEX ::= [0-9] | [A-F] | [a-f] + [173s] PN_LOCAL_ESC ::= '\' ( '_' | '~' | '.' | '-' | '!' | '$' | '&' | "'" | '(' | ')' | '*' | '+' | ',' | ';' | '=' + | '/' | '?' 
| '#' | '@' | '%' ) + [39] COMMENT ::= ('#' - '#x') [^#xA#xC#xD]* + + # Ignore all whitespace and comments between non-terminals + @pass ( WS | COMMENT )* + + diff --git a/etc/n3.peg.sxp b/etc/n3.peg.sxp new file mode 100644 index 0000000..1cbfbc7 --- /dev/null +++ b/etc/n3.peg.sxp @@ -0,0 +1,235 @@ +( + (rule n3Doc "1" (star _n3Doc_1)) + (rule _n3Doc_1 "1.1" (alt _n3Doc_2 sparqlDirective)) + (rule _n3Doc_2 "1.2" (seq n3Statement ".")) + (rule n3Statement "2" (alt n3Directive triples existential universal)) + (rule n3Directive "3" (alt prefixID base)) + (rule sparqlDirective "4" (alt sparqlBase sparqlPrefix)) + (rule sparqlBase "5" (seq BASE IRIREF)) + (rule sparqlPrefix "6" (seq PREFIX PNAME_NS IRIREF)) + (rule prefixID "7" (seq "@prefix" PNAME_NS IRIREF)) + (rule base "8" (seq "@base" IRIREF)) + (rule triples "9" (seq subject _triples_1)) + (rule _triples_1 "9.1" (opt predicateObjectList)) + (rule predicateObjectList "10" (seq verb objectList _predicateObjectList_1)) + (rule _predicateObjectList_1 "10.1" (star _predicateObjectList_2)) + (rule _predicateObjectList_2 "10.2" (seq ";" _predicateObjectList_3)) + (rule _predicateObjectList_3 "10.3" (opt _predicateObjectList_4)) + (rule _predicateObjectList_4 "10.4" (seq verb objectList)) + (rule objectList "11" (seq object _objectList_1)) + (rule _objectList_1 "11.1" (star _objectList_2)) + (rule _objectList_2 "11.2" (seq "," object)) + (rule verb "12" (alt predicate "a" _verb_1 _verb_2 _verb_3 "<=" "=>" "=")) + (rule _verb_1 "12.1" (seq "has" expression)) + (rule _verb_2 "12.2" (seq "is" expression "of")) + (rule _verb_3 "12.3" (seq "<-" expression)) + (rule subject "13" (seq expression)) + (rule predicate "14" (seq expression)) + (rule object "15" (seq expression)) + (rule expression "16" (seq path)) + (rule path "17" (seq pathItem _path_1)) + (rule _path_1 "17.1" (opt _path_2)) + (rule _path_2 "17.2" (alt _path_3 _path_4)) + (rule _path_3 "17.3" (seq "!" path)) + (rule _path_4 "17.4" (seq "^" path)) + (rule pathItem "18" + (alt iri blankNode quickVar collection blankNodePropertyList literal formula)) + (rule literal "19" (alt rdfLiteral numericLiteral BOOLEAN_LITERAL)) + (rule blankNodePropertyList "20" (seq "[" predicateObjectList "]")) + (rule collection "21" (seq "(" _collection_1 ")")) + (rule _collection_1 "21.1" (star object)) + (rule formula "22" (seq "{" _formula_1 "}")) + (rule _formula_1 "22.1" (opt formulaContent)) + (rule formulaContent "23" (alt _formulaContent_1 _formulaContent_2)) + (rule _formulaContent_1 "23.1" (seq n3Statement _formulaContent_3)) + (rule _formulaContent_3 "23.3" (opt _formulaContent_4)) + (rule _formulaContent_4 "23.4" (seq "." 
_formulaContent_5)) + (rule _formulaContent_5 "23.5" (opt formulaContent)) + (rule _formulaContent_2 "23.2" (seq sparqlDirective _formulaContent_6)) + (rule _formulaContent_6 "23.6" (opt formulaContent)) + (rule numericLiteral "24" (alt DOUBLE DECIMAL INTEGER)) + (rule rdfLiteral "25" (seq STRING _rdfLiteral_1)) + (rule _rdfLiteral_1 "25.1" (opt _rdfLiteral_2)) + (rule _rdfLiteral_2 "25.2" (alt LANGTAG _rdfLiteral_3)) + (rule _rdfLiteral_3 "25.3" (seq "^^" iri)) + (rule iri "26" (alt IRIREF prefixedName)) + (rule iriList "27" (seq iri _iriList_1)) + (rule _iriList_1 "27.1" (star _iriList_2)) + (rule _iriList_2 "27.2" (seq "," iri)) + (rule prefixedName "28" (alt PNAME_LN PNAME_NS)) + (rule blankNode "29" (alt BLANK_NODE_LABEL ANON)) + (rule quickVar "30" (seq QUICK_VAR_NAME)) + (rule existential "31" (seq "@forSome" iriList)) + (rule universal "32" (seq "@forAll" iriList)) + (terminals _terminals (seq)) + (terminal BOOLEAN_LITERAL "33" (alt "true" "false")) + (terminal STRING "34" + (alt STRING_LITERAL_LONG_SINGLE_QUOTE STRING_LITERAL_LONG_QUOTE + STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE )) + (terminal IRIREF "139s" (seq "<" _IRIREF_1 ">")) + (terminal _IRIREF_1 "139s.1" (star _IRIREF_2)) + (terminal _IRIREF_2 "139s.2" (alt _IRIREF_3 UCHAR WS)) + (terminal _IRIREF_3 "139s.3" (diff _IRIREF_4 _IRIREF_5)) + (terminal _IRIREF_4 "139s.4" (range "^<>\"{}|^`\\")) + (terminal _IRIREF_5 "139s.5" (range "#x00-#x20")) + (terminal PNAME_NS "140s" (seq _PNAME_NS_1 ":")) + (terminal _PNAME_NS_1 "140s.1" (opt PN_PREFIX)) + (terminal PNAME_LN "141s" (seq PNAME_NS PN_LOCAL)) + (terminal BLANK_NODE_LABEL "142s" + (seq "_:" _BLANK_NODE_LABEL_1 _BLANK_NODE_LABEL_2)) + (terminal _BLANK_NODE_LABEL_1 "142s.1" (alt PN_CHARS_U _BLANK_NODE_LABEL_3)) + (terminal _BLANK_NODE_LABEL_3 "142s.3" (range "0-9")) + (terminal _BLANK_NODE_LABEL_2 "142s.2" (opt _BLANK_NODE_LABEL_4)) + (terminal _BLANK_NODE_LABEL_4 "142s.4" (seq _BLANK_NODE_LABEL_5 PN_CHARS)) + (terminal _BLANK_NODE_LABEL_5 "142s.5" (star _BLANK_NODE_LABEL_6)) + (terminal _BLANK_NODE_LABEL_6 "142s.6" (alt PN_CHARS ".")) + (terminal LANGTAG "145s" (seq "@" _LANGTAG_1)) + (terminal _LANGTAG_1 "145s.1" (diff _LANGTAG_2 _LANGTAG_3)) + (terminal _LANGTAG_2 "145s.2" (seq _LANGTAG_4 _LANGTAG_5)) + (terminal _LANGTAG_4 "145s.4" (plus _LANGTAG_6)) + (terminal _LANGTAG_6 "145s.6" (range "a-zA-Z")) + (terminal _LANGTAG_5 "145s.5" (star _LANGTAG_7)) + (terminal _LANGTAG_7 "145s.7" (seq "-" _LANGTAG_8)) + (terminal _LANGTAG_8 "145s.8" (plus _LANGTAG_9)) + (terminal _LANGTAG_9 "145s.9" (range "a-zA-Z0-9")) + (terminal _LANGTAG_3 "145s.3" (alt "is" "has")) + (terminal INTEGER "146s" (plus _INTEGER_1)) + (terminal _INTEGER_1 "146s.1" (range "0-9")) + (terminal DECIMAL "147s" (seq _DECIMAL_1 "." _DECIMAL_2)) + (terminal _DECIMAL_1 "147s.1" (star _DECIMAL_3)) + (terminal _DECIMAL_3 "147s.3" (range "0-9")) + (terminal _DECIMAL_2 "147s.2" (plus _DECIMAL_4)) + (terminal _DECIMAL_4 "147s.4" (range "0-9")) + (terminal DOUBLE "148s" (alt _DOUBLE_1 _DOUBLE_2 _DOUBLE_3)) + (terminal _DOUBLE_1 "148s.1" (seq _DOUBLE_4 "." _DOUBLE_5 EXPONENT)) + (terminal _DOUBLE_4 "148s.4" (plus _DOUBLE_6)) + (terminal _DOUBLE_6 "148s.6" (range "0-9")) + (terminal _DOUBLE_5 "148s.5" (star _DOUBLE_7)) + (terminal _DOUBLE_7 "148s.7" (range "0-9")) + (terminal _DOUBLE_2 "148s.2" (seq "." 
_DOUBLE_8 EXPONENT)) + (terminal _DOUBLE_8 "148s.8" (plus _DOUBLE_9)) + (terminal _DOUBLE_9 "148s.9" (range "0-9")) + (terminal _DOUBLE_3 "148s.3" (seq _DOUBLE_10 EXPONENT)) + (terminal _DOUBLE_10 "148s.10" (plus _DOUBLE_11)) + (terminal _DOUBLE_11 "148s.11" (range "0-9")) + (terminal EXPONENT "155s" (seq _EXPONENT_1 _EXPONENT_2 _EXPONENT_3)) + (terminal _EXPONENT_1 "155s.1" (range "eE")) + (terminal _EXPONENT_2 "155s.2" (opt _EXPONENT_4)) + (terminal _EXPONENT_4 "155s.4" (range "+-")) + (terminal _EXPONENT_3 "155s.3" (plus _EXPONENT_5)) + (terminal _EXPONENT_5 "155s.5" (range "0-9")) + (terminal STRING_LITERAL_QUOTE "156s" (seq "\"" _STRING_LITERAL_QUOTE_1 "\"")) + (terminal _STRING_LITERAL_QUOTE_1 "156s.1" (star _STRING_LITERAL_QUOTE_2)) + (terminal _STRING_LITERAL_QUOTE_2 "156s.2" + (alt _STRING_LITERAL_QUOTE_3 ECHAR UCHAR)) + (terminal _STRING_LITERAL_QUOTE_3 "156s.3" (range "^#x22#x5C#xA#xD")) + (terminal STRING_LITERAL_SINGLE_QUOTE "157s" + (seq "'" _STRING_LITERAL_SINGLE_QUOTE_1 "'")) + (terminal _STRING_LITERAL_SINGLE_QUOTE_1 "157s.1" + (star _STRING_LITERAL_SINGLE_QUOTE_2)) + (terminal _STRING_LITERAL_SINGLE_QUOTE_2 "157s.2" + (alt _STRING_LITERAL_SINGLE_QUOTE_3 ECHAR UCHAR)) + (terminal _STRING_LITERAL_SINGLE_QUOTE_3 "157s.3" (range "^#x27#x5C#xA#xD")) + (terminal STRING_LITERAL_LONG_SINGLE_QUOTE "158s" + (seq "'''" _STRING_LITERAL_LONG_SINGLE_QUOTE_1 "'''")) + (terminal _STRING_LITERAL_LONG_SINGLE_QUOTE_1 "158s.1" + (star _STRING_LITERAL_LONG_SINGLE_QUOTE_2)) + (terminal _STRING_LITERAL_LONG_SINGLE_QUOTE_2 "158s.2" + (seq _STRING_LITERAL_LONG_SINGLE_QUOTE_3 _STRING_LITERAL_LONG_SINGLE_QUOTE_4)) + (terminal _STRING_LITERAL_LONG_SINGLE_QUOTE_3 "158s.3" + (opt _STRING_LITERAL_LONG_SINGLE_QUOTE_5)) + (terminal _STRING_LITERAL_LONG_SINGLE_QUOTE_5 "158s.5" (alt "'" "''")) + (terminal _STRING_LITERAL_LONG_SINGLE_QUOTE_4 "158s.4" + (alt _STRING_LITERAL_LONG_SINGLE_QUOTE_6 ECHAR UCHAR)) + (terminal _STRING_LITERAL_LONG_SINGLE_QUOTE_6 "158s.6" (range "^'\\")) + (terminal STRING_LITERAL_LONG_QUOTE "159s" + (seq "\"\"\"" _STRING_LITERAL_LONG_QUOTE_1 "\"\"\"")) + (terminal _STRING_LITERAL_LONG_QUOTE_1 "159s.1" (star _STRING_LITERAL_LONG_QUOTE_2)) + (terminal _STRING_LITERAL_LONG_QUOTE_2 "159s.2" + (seq _STRING_LITERAL_LONG_QUOTE_3 _STRING_LITERAL_LONG_QUOTE_4)) + (terminal _STRING_LITERAL_LONG_QUOTE_3 "159s.3" (opt _STRING_LITERAL_LONG_QUOTE_5)) + (terminal _STRING_LITERAL_LONG_QUOTE_5 "159s.5" (alt "\"" "\"\"")) + (terminal _STRING_LITERAL_LONG_QUOTE_4 "159s.4" + (alt _STRING_LITERAL_LONG_QUOTE_6 ECHAR UCHAR)) + (terminal _STRING_LITERAL_LONG_QUOTE_6 "159s.6" (range "^\"\\")) + (terminal UCHAR "35" (alt _UCHAR_1 _UCHAR_2)) + (terminal _UCHAR_1 "35.1" (seq "\\u" HEX HEX HEX HEX)) + (terminal _UCHAR_2 "35.2" (seq "\\U" HEX HEX HEX HEX HEX HEX HEX HEX)) + (terminal ECHAR "160s" (seq "\\" _ECHAR_1)) + (terminal _ECHAR_1 "160s.1" (range "tbnrf\\\"'")) + (terminal WS "162s" (alt _WS_1 _WS_2 _WS_3 _WS_4)) + (terminal _WS_1 "162s.1" (hex "#x20")) + (terminal _WS_2 "162s.2" (hex "#x9")) + (terminal _WS_3 "162s.3" (hex "#xD")) + (terminal _WS_4 "162s.4" (hex "#xA")) + (terminal ANON "163s" (seq "[" _ANON_1 "]")) + (terminal _ANON_1 "163s.1" (star WS)) + (terminal QUICK_VAR_NAME "36" (seq "?" 
PN_LOCAL)) + (terminal PN_CHARS_BASE "164s" + (alt _PN_CHARS_BASE_1 _PN_CHARS_BASE_2 _PN_CHARS_BASE_3 _PN_CHARS_BASE_4 + _PN_CHARS_BASE_5 _PN_CHARS_BASE_6 _PN_CHARS_BASE_7 _PN_CHARS_BASE_8 + _PN_CHARS_BASE_9 _PN_CHARS_BASE_10 _PN_CHARS_BASE_11 _PN_CHARS_BASE_12 + _PN_CHARS_BASE_13 _PN_CHARS_BASE_14 )) + (terminal _PN_CHARS_BASE_1 "164s.1" (range "A-Z")) + (terminal _PN_CHARS_BASE_2 "164s.2" (range "a-z")) + (terminal _PN_CHARS_BASE_3 "164s.3" (range "#x00C0-#x00D6")) + (terminal _PN_CHARS_BASE_4 "164s.4" (range "#x00D8-#x00F6")) + (terminal _PN_CHARS_BASE_5 "164s.5" (range "#x00F8-#x02FF")) + (terminal _PN_CHARS_BASE_6 "164s.6" (range "#x0370-#x037D")) + (terminal _PN_CHARS_BASE_7 "164s.7" (range "#x037F-#x1FFF")) + (terminal _PN_CHARS_BASE_8 "164s.8" (range "#x200C-#x200D")) + (terminal _PN_CHARS_BASE_9 "164s.9" (range "#x2070-#x218F")) + (terminal _PN_CHARS_BASE_10 "164s.10" (range "#x2C00-#x2FEF")) + (terminal _PN_CHARS_BASE_11 "164s.11" (range "#x3001-#xD7FF")) + (terminal _PN_CHARS_BASE_12 "164s.12" (range "#xF900-#xFDCF")) + (terminal _PN_CHARS_BASE_13 "164s.13" (range "#xFDF0-#xFFFD")) + (terminal _PN_CHARS_BASE_14 "164s.14" (range "#x10000-#xEFFFF")) + (terminal PN_CHARS_U "165s" (alt PN_CHARS_BASE "_")) + (terminal PN_CHARS "167s" + (alt PN_CHARS_U "-" _PN_CHARS_1 _PN_CHARS_2 _PN_CHARS_3 _PN_CHARS_4)) + (terminal _PN_CHARS_1 "167s.1" (range "0-9")) + (terminal _PN_CHARS_2 "167s.2" (hex "#x00B7")) + (terminal _PN_CHARS_3 "167s.3" (range "#x0300-#x036F")) + (terminal _PN_CHARS_4 "167s.4" (range "#x203F-#x2040")) + (terminal BASE "37" (seq _BASE_1 _BASE_2 _BASE_3 _BASE_4)) + (terminal _BASE_1 "37.1" (alt "B" "b")) + (terminal _BASE_2 "37.2" (alt "A" "a")) + (terminal _BASE_3 "37.3" (alt "S" "s")) + (terminal _BASE_4 "37.4" (alt "E" "e")) + (terminal PREFIX "38" + (seq _PREFIX_1 _PREFIX_2 _PREFIX_3 _PREFIX_4 _PREFIX_5 _PREFIX_6)) + (terminal _PREFIX_1 "38.1" (alt "P" "p")) + (terminal _PREFIX_2 "38.2" (alt "R" "r")) + (terminal _PREFIX_3 "38.3" (alt "E" "e")) + (terminal _PREFIX_4 "38.4" (alt "F" "f")) + (terminal _PREFIX_5 "38.5" (alt "I" "i")) + (terminal _PREFIX_6 "38.6" (alt "X" "x")) + (terminal PN_PREFIX "168s" (seq PN_CHARS_BASE _PN_PREFIX_1)) + (terminal _PN_PREFIX_1 "168s.1" (opt _PN_PREFIX_2)) + (terminal _PN_PREFIX_2 "168s.2" (seq _PN_PREFIX_3 PN_CHARS)) + (terminal _PN_PREFIX_3 "168s.3" (star _PN_PREFIX_4)) + (terminal _PN_PREFIX_4 "168s.4" (alt PN_CHARS ".")) + (terminal PN_LOCAL "169s" (seq _PN_LOCAL_1 _PN_LOCAL_2)) + (terminal _PN_LOCAL_1 "169s.1" (alt PN_CHARS_U ":" _PN_LOCAL_3 PLX)) + (terminal _PN_LOCAL_3 "169s.3" (range "0-9")) + (terminal _PN_LOCAL_2 "169s.2" (opt _PN_LOCAL_4)) + (terminal _PN_LOCAL_4 "169s.4" (seq _PN_LOCAL_5 _PN_LOCAL_6)) + (terminal _PN_LOCAL_5 "169s.5" (star _PN_LOCAL_7)) + (terminal _PN_LOCAL_7 "169s.7" (alt PN_CHARS "." ":" PLX)) + (terminal _PN_LOCAL_6 "169s.6" (alt PN_CHARS ":" PLX)) + (terminal PLX "170s" (alt PERCENT PN_LOCAL_ESC)) + (terminal PERCENT "171s" (seq "%" HEX HEX)) + (terminal HEX "172s" (alt _HEX_1 _HEX_2 _HEX_3)) + (terminal _HEX_1 "172s.1" (range "0-9")) + (terminal _HEX_2 "172s.2" (range "A-F")) + (terminal _HEX_3 "172s.3" (range "a-f")) + (terminal PN_LOCAL_ESC "173s" (seq "\\" _PN_LOCAL_ESC_1)) + (terminal _PN_LOCAL_ESC_1 "173s.1" + (alt "_" "~" "." "-" "!" "$" "&" "'" "(" ")" "*" "+" "," ";" "=" "/" "?" 
"#" + "@" "%" )) + (terminal COMMENT "39" (seq _COMMENT_1 _COMMENT_2)) + (terminal _COMMENT_1 "39.1" (diff "#" "#x")) + (terminal _COMMENT_2 "39.2" (star _COMMENT_3)) + (terminal _COMMENT_3 "39.3" (range "^#xA#xC#xD")) + (pass _pass (star __pass_1)) + (rule __pass_1 (alt WS COMMENT))) diff --git a/etc/n3.sxp b/etc/n3.sxp new file mode 100644 index 0000000..102eed7 --- /dev/null +++ b/etc/n3.sxp @@ -0,0 +1,119 @@ +( + (rule n3Doc "1" (star (alt (seq n3Statement ".") sparqlDirective))) + (rule n3Statement "2" (alt n3Directive triples existential universal)) + (rule n3Directive "3" (alt prefixID base)) + (rule sparqlDirective "4" (alt sparqlBase sparqlPrefix)) + (rule sparqlBase "5" (seq BASE IRIREF)) + (rule sparqlPrefix "6" (seq PREFIX PNAME_NS IRIREF)) + (rule prefixID "7" (seq "@prefix" PNAME_NS IRIREF)) + (rule base "8" (seq "@base" IRIREF)) + (rule triples "9" (seq subject (opt predicateObjectList))) + (rule predicateObjectList "10" + (seq verb objectList (star (seq ";" (opt (seq verb objectList)))))) + (rule objectList "11" (seq object (star (seq "," object)))) + (rule verb "12" + (alt predicate "a" + (seq "has" expression) + (seq "is" expression "of") + (seq "<-" expression) "<=" "=>" "=" )) + (rule subject "13" (seq expression)) + (rule predicate "14" (seq expression)) + (rule object "15" (seq expression)) + (rule expression "16" (seq path)) + (rule path "17" (seq pathItem (opt (alt (seq "!" path) (seq "^" path))))) + (rule pathItem "18" + (alt iri blankNode quickVar collection blankNodePropertyList literal formula)) + (rule literal "19" (alt rdfLiteral numericLiteral BOOLEAN_LITERAL)) + (rule blankNodePropertyList "20" (seq "[" predicateObjectList "]")) + (rule collection "21" (seq "(" (star object) ")")) + (rule formula "22" (seq "{" (opt formulaContent) "}")) + (rule formulaContent "23" + (alt + (seq n3Statement (opt (seq "." (opt formulaContent)))) + (seq sparqlDirective (opt formulaContent))) ) + (rule numericLiteral "24" (alt DOUBLE DECIMAL INTEGER)) + (rule rdfLiteral "25" (seq STRING (opt (alt LANGTAG (seq "^^" iri))))) + (rule iri "26" (alt IRIREF prefixedName)) + (rule iriList "27" (seq iri (star (seq "," iri)))) + (rule prefixedName "28" (alt PNAME_LN PNAME_NS)) + (rule blankNode "29" (alt BLANK_NODE_LABEL ANON)) + (rule quickVar "30" (seq QUICK_VAR_NAME)) + (rule existential "31" (seq "@forSome" iriList)) + (rule universal "32" (seq "@forAll" iriList)) + (terminals _terminals (seq)) + (terminal BOOLEAN_LITERAL "33" (alt "true" "false")) + (terminal STRING "34" + (alt STRING_LITERAL_LONG_SINGLE_QUOTE STRING_LITERAL_LONG_QUOTE + STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE )) + (terminal IRIREF "139s" + (seq "<" (star (alt (diff (range "^<>\"{}|^`\\") (range "#x00-#x20")) UCHAR WS)) ">")) + (terminal PNAME_NS "140s" (seq (opt PN_PREFIX) ":")) + (terminal PNAME_LN "141s" (seq PNAME_NS PN_LOCAL)) + (terminal BLANK_NODE_LABEL "142s" + (seq "_:" (alt PN_CHARS_U (range "0-9")) (opt (seq (star (alt PN_CHARS ".")) PN_CHARS)))) + (terminal LANGTAG "145s" + (seq "@" + (diff (seq (plus (range "a-zA-Z")) (star (seq "-" (plus (range "a-zA-Z0-9"))))) (alt "is" "has"))) ) + (terminal INTEGER "146s" (plus (range "0-9"))) + (terminal DECIMAL "147s" (seq (star (range "0-9")) "." (plus (range "0-9")))) + (terminal DOUBLE "148s" + (alt + (seq (plus (range "0-9")) "." (star (range "0-9")) EXPONENT) + (seq "." 
(plus (range "0-9")) EXPONENT) + (seq (plus (range "0-9")) EXPONENT)) ) + (terminal EXPONENT "155s" (seq (range "eE") (opt (range "+-")) (plus (range "0-9")))) + (terminal STRING_LITERAL_QUOTE "156s" + (seq "\"" (star (alt (range "^#x22#x5C#xA#xD") ECHAR UCHAR)) "\"")) + (terminal STRING_LITERAL_SINGLE_QUOTE "157s" + (seq "'" (star (alt (range "^#x27#x5C#xA#xD") ECHAR UCHAR)) "'")) + (terminal STRING_LITERAL_LONG_SINGLE_QUOTE "158s" + (seq "'''" (star (seq (opt (alt "'" "''")) (alt (range "^'\\") ECHAR UCHAR))) "'''")) + (terminal STRING_LITERAL_LONG_QUOTE "159s" + (seq "\"\"\"" (star (seq (opt (alt "\"" "\"\"")) (alt (range "^\"\\") ECHAR UCHAR))) "\"\"\"")) + (terminal UCHAR "35" + (alt (seq "\\u" HEX HEX HEX HEX) (seq "\\U" HEX HEX HEX HEX HEX HEX HEX HEX))) + (terminal ECHAR "160s" (seq "\\" (range "tbnrf\\\"'"))) + (terminal WS "162s" (alt (hex "#x20") (hex "#x9") (hex "#xD") (hex "#xA"))) + (terminal ANON "163s" (seq "[" (star WS) "]")) + (terminal QUICK_VAR_NAME "36" (seq "?" PN_LOCAL)) + (terminal PN_CHARS_BASE "164s" + (alt + (range "A-Z") + (range "a-z") + (range "#x00C0-#x00D6") + (range "#x00D8-#x00F6") + (range "#x00F8-#x02FF") + (range "#x0370-#x037D") + (range "#x037F-#x1FFF") + (range "#x200C-#x200D") + (range "#x2070-#x218F") + (range "#x2C00-#x2FEF") + (range "#x3001-#xD7FF") + (range "#xF900-#xFDCF") + (range "#xFDF0-#xFFFD") + (range "#x10000-#xEFFFF")) ) + (terminal PN_CHARS_U "165s" (alt PN_CHARS_BASE "_")) + (terminal PN_CHARS "167s" + (alt PN_CHARS_U "-" + (range "0-9") + (hex "#x00B7") + (range "#x0300-#x036F") + (range "#x203F-#x2040")) ) + (terminal BASE "37" (seq (alt "B" "b") (alt "A" "a") (alt "S" "s") (alt "E" "e"))) + (terminal PREFIX "38" + (seq (alt "P" "p") (alt "R" "r") (alt "E" "e") (alt "F" "f") (alt "I" "i") (alt "X" "x"))) + (terminal PN_PREFIX "168s" + (seq PN_CHARS_BASE (opt (seq (star (alt PN_CHARS ".")) PN_CHARS)))) + (terminal PN_LOCAL "169s" + (seq + (alt PN_CHARS_U ":" (range "0-9") PLX) + (opt (seq (star (alt PN_CHARS "." ":" PLX)) (alt PN_CHARS ":" PLX)))) ) + (terminal PLX "170s" (alt PERCENT PN_LOCAL_ESC)) + (terminal PERCENT "171s" (seq "%" HEX HEX)) + (terminal HEX "172s" (alt (range "0-9") (range "A-F") (range "a-f"))) + (terminal PN_LOCAL_ESC "173s" + (seq "\\" + (alt "_" "~" "." "-" "!" "$" "&" "'" "(" ")" "*" "+" "," ";" "=" "/" "?" "#" + "@" "%" )) ) + (terminal COMMENT "39" (seq (diff "#" "#x") (star (range "^#xA#xC#xD")))) + (pass _pass (star (alt WS COMMENT)))) diff --git a/etc/notation3.ebnf b/etc/notation3.ebnf deleted file mode 100644 index 13a5a6a..0000000 --- a/etc/notation3.ebnf +++ /dev/null @@ -1,106 +0,0 @@ -# Notation3 Grammar updated with some Turtle poductions and terminals. -# From swap/notation3/notation3.bnf - -[1] document ::= (statement ".")* - -/* Formula does NOT need period on last statement */ - -[2] formulacontent ::= (statement ("." statement)*)? - -[3] statement ::= declaration - | universal - | existential - | simpleStatement - -[4] universal ::= "@forAll" varlist - -[5] existential ::= "@forSome" varlist - -[6] varlist ::= (symbol ("," symbol)*)? - -[7] declaration ::= "@prefix" PNAME_NS IRIREF - | "@keywords" (barename ("," barename)*)? - -[8] barename ::= PNAME_LN -/* barename constraint: no colon */ - -[9] simpleStatement ::= term propertylist - -[10] propertylist ::= (property (";" property)*)? -[11] property ::= (verb | inverb) term ("," term)* - -[12] verb ::= "@has"? term - | "@a" - | "=" - | "=>" - | "<=" - -[12a] inverb ::= "@is" term "@of" - -[13] term ::= pathitem pathtail? 
- -[14] pathtail ::= ("!" | "^") term - -[15] pathitem ::= symbol - | BLANK_NODE_LABEL - | UVAR - | literal - | "{" formulacontent "}" - | "[" propertylist "]" - | "(" term* ")" - -[13] literal ::= RDFLiteral | NumericLiteral | BooleanLiteral -[16] NumericLiteral ::= INTEGER | DECIMAL | DOUBLE -[128s] RDFLiteral ::= String ( LANGTAG | ( "^^" iri ) )? -[133s] BooleanLiteral ::= "@true" | "@false" -[17t] String ::= STRING_LITERAL_QUOTE | STRING_LITERAL_SINGLE_QUOTE | STRING_LITERAL_LONG_SINGLE_QUOTE | - STRING_LITERAL_LONG_QUOTE - -[18] symbol ::= IRIREF | PNAME_LN - -/***********/ - -@terminals - -[35] UVAR ::= "?" PN_LOCAL - -/* borrowed from SPARQL spec, which excludes newlines and other nastiness */ -[18] IRIREF ::= '<' ([^#x00-#x20<>"{}|^`\] | UCHAR)* '>' -[139s] PNAME_NS ::= PN_PREFIX? ":" -[140s] PNAME_LN ::= PNAME_NS PN_LOCAL -[141s] BLANK_NODE_LABEL ::= '_:' ( PN_CHARS_U | [0-9] ) ((PN_CHARS|'.')* PN_CHARS)? -[144s] LANGTAG ::= "@" [a-zA-Z]+ ( "-" [a-zA-Z0-9]+ )* -[19] INTEGER ::= [+-]? [0-9]+ -[20] DECIMAL ::= [+-]? ( ([0-9])* '.' ([0-9])+ ) -[21] DOUBLE ::= [+-]? ( [0-9]+ '.' [0-9]* EXPONENT | '.' ([0-9])+ EXPONENT | ([0-9])+ EXPONENT ) -[154s] EXPONENT ::= [eE] [+-]? [0-9]+ -[22] STRING_LITERAL_QUOTE ::= '"' ( [^#x22#x5C#xA#xD] | ECHAR | UCHAR )* '"' -[23] STRING_LITERAL_SINGLE_QUOTE ::= "'" ( [^#x27#x5C#xA#xD] | ECHAR | UCHAR )* "'" -[24] STRING_LITERAL_LONG_SINGLE_QUOTE ::= "'''" ( ( "'" | "''" )? ( [^'\] | ECHAR | UCHAR ) )* "'''" -[25] STRING_LITERAL_LONG_QUOTE ::= '"""' ( ( '"' | '""' )? ( [^"\] | ECHAR | UCHAR ) )* '"""' -[26] UCHAR ::= ( "\u" HEX HEX HEX HEX ) | ( "\U" HEX HEX HEX HEX HEX HEX HEX HEX ) -[159s] ECHAR ::= "\" [tbnrf\"'] - -[163s] PN_CHARS_BASE ::= [A-Z] - | [a-z] - | [#x00C0-#x00D6] - | [#x00D8-#x00F6] - | [#x00F8-#x02FF] - | [#x0370-#x037D] - | [#x037F-#x1FFF] - | [#x200C-#x200D] - | [#x2070-#x218F] - | [#x2C00-#x2FEF] - | [#x3001-#xD7FF] - | [#xF900-#xFDCF] - | [#xFDF0-#xFFFD] - | [#x10000-#xEFFFF] -[164s] PN_CHARS_U ::= PN_CHARS_BASE | '_' -[166s] PN_CHARS ::= PN_CHARS_U | "-" | [0-9] | #x00B7 | [#x0300-#x036F] | [#x203F-#x2040] -[167s] PN_PREFIX ::= PN_CHARS_BASE ( ( PN_CHARS | "." )* PN_CHARS )? -[168s] PN_LOCAL ::= ( PN_CHARS_U | ':' | [0-9] | PLX ) ( ( PN_CHARS | '.' | ':' | PLX )* ( PN_CHARS | ':' | PLX ) ) ? -[169s] PLX ::= PERCENT | PN_LOCAL_ESC -[170s] PERCENT ::= '%' HEX HEX -[42] HEX ::= [0-9] | [A-F] | [a-f] -[172s] PN_LOCAL_ESC ::= '\' ( '_' | '~' | '.' | '-' | '!' | '$' | '&' | "'" | '(' | ')' | '*' | '+' | ',' | ';' | '=' - | '/' | '?' 
| '#' | '@' | '%' ) \ No newline at end of file diff --git a/etc/notation3.ll1.sxp b/etc/notation3.ll1.sxp deleted file mode 100644 index 8ef9a3e..0000000 --- a/etc/notation3.ll1.sxp +++ /dev/null @@ -1,587 +0,0 @@ -( - (rule _empty "0" (first _eps) (seq)) - (rule document "1" - (start #t) - (first "(" "@false" "@forAll" "@forSome" "@keywords" "@prefix" "@true" - BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" _eps "{" ) - (follow _eof) - (cleanup star) - (alt _empty _document_2)) - (rule _document_1 "1.1" - (first "(" "@false" "@forAll" "@forSome" "@keywords" "@prefix" "@true" - BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (follow "(" "@false" "@forAll" "@forSome" "@keywords" "@prefix" "@true" - BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" _eof "{" ) - (seq statement ".")) - (rule _document_2 "1.2" - (first "(" "@false" "@forAll" "@forSome" "@keywords" "@prefix" "@true" - BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (follow _eof) - (cleanup merge) - (seq _document_1 document)) - (rule _document_3 "1.3" - (first "(" "@false" "@forAll" "@forSome" "@keywords" "@prefix" "@true" - BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" _eps "{" ) - (follow _eof) - (seq document)) - (rule _document_4 "1.4" - (first ".") - (follow "(" "@false" "@forAll" "@forSome" "@keywords" "@prefix" "@true" - BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" _eof "{" ) - (seq ".")) - (rule formulacontent "2" - (first "(" "@false" "@forAll" "@forSome" "@keywords" "@prefix" "@true" - BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" _eps "{" ) - (follow "}") - (cleanup opt) - (alt _empty _formulacontent_1)) - (rule _formulacontent_1 "2.1" - (first "(" "@false" "@forAll" "@forSome" "@keywords" "@prefix" "@true" - BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (follow "}") - (seq statement _formulacontent_2)) - (rule _formulacontent_2 "2.2" - (first "." _eps) - (follow "}") - (cleanup star) - (alt _empty _formulacontent_4)) - (rule _formulacontent_3 "2.3" (first ".") (follow "." "}") (seq "." statement)) - (rule _formulacontent_4 "2.4" - (first ".") - (follow "}") - (cleanup merge) - (seq _formulacontent_3 _formulacontent_2)) - (rule _formulacontent_5 "2.5" (first "." _eps) (follow "}") (seq _formulacontent_2)) - (rule _formulacontent_6 "2.6" (first "." 
_eps) (follow "}") (seq _formulacontent_2)) - (rule _formulacontent_7 "2.7" - (first "(" "@false" "@forAll" "@forSome" "@keywords" "@prefix" "@true" - BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (follow "." "}") - (seq statement)) - (rule statement "3" - (first "(" "@false" "@forAll" "@forSome" "@keywords" "@prefix" "@true" - BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (follow "." "}") - (alt declaration universal existential simpleStatement)) - (rule universal "4" (first "@forAll") (follow "." "}") (seq "@forAll" varlist)) - (rule _universal_1 "4.1" (first IRIREF PNAME_LN _eps) (follow "." "}") (seq varlist)) - (rule existential "5" (first "@forSome") (follow "." "}") (seq "@forSome" varlist)) - (rule _existential_1 "5.1" (first IRIREF PNAME_LN _eps) (follow "." "}") (seq varlist)) - (rule varlist "6" - (first IRIREF PNAME_LN _eps) - (follow "." "}") - (cleanup opt) - (alt _empty _varlist_1)) - (rule _varlist_1 "6.1" (first IRIREF PNAME_LN) (follow "." "}") (seq symbol _varlist_2)) - (rule _varlist_2 "6.2" - (first "," _eps) - (follow "." "}") - (cleanup star) - (alt _empty _varlist_4)) - (rule _varlist_3 "6.3" (first ",") (follow "," "." "}") (seq "," symbol)) - (rule _varlist_4 "6.4" - (first ",") - (follow "." "}") - (cleanup merge) - (seq _varlist_3 _varlist_2)) - (rule _varlist_5 "6.5" (first "," _eps) (follow "." "}") (seq _varlist_2)) - (rule _varlist_6 "6.6" (first "," _eps) (follow "." "}") (seq _varlist_2)) - (rule _varlist_7 "6.7" (first IRIREF PNAME_LN) (follow "," "." "}") (seq symbol)) - (rule declaration "7" - (first "@keywords" "@prefix") - (follow "." "}") - (alt _declaration_1 _declaration_2)) - (rule _declaration_1 "7.1" - (first "@prefix") - (follow "." "}") - (seq "@prefix" PNAME_NS IRIREF)) - (rule _declaration_10 "7.10" (first "," _eps) (follow "." "}") (seq _declaration_5)) - (rule _declaration_11 "7.11" (first "," _eps) (follow "." "}") (seq _declaration_5)) - (rule _declaration_12 "7.12" (first PNAME_LN) (follow "," "." "}") (seq barename)) - (rule _declaration_13 "7.13" (first IRIREF) (follow "." "}") (seq IRIREF)) - (rule _declaration_2 "7.2" - (first "@keywords") - (follow "." "}") - (seq "@keywords" _declaration_3)) - (rule _declaration_3 "7.3" - (first PNAME_LN _eps) - (follow "." "}") - (cleanup opt) - (alt _empty _declaration_4)) - (rule _declaration_4 "7.4" - (first PNAME_LN) - (follow "." "}") - (seq barename _declaration_5)) - (rule _declaration_5 "7.5" - (first "," _eps) - (follow "." "}") - (cleanup star) - (alt _empty _declaration_7)) - (rule _declaration_6 "7.6" (first ",") (follow "," "." "}") (seq "," barename)) - (rule _declaration_7 "7.7" - (first ",") - (follow "." "}") - (cleanup merge) - (seq _declaration_6 _declaration_5)) - (rule _declaration_8 "7.8" (first PNAME_NS) (follow "." "}") (seq PNAME_NS IRIREF)) - (rule _declaration_9 "7.9" (first PNAME_LN _eps) (follow "." "}") (seq _declaration_3)) - (rule barename "8" (first PNAME_LN) (follow "," "." "}") (seq PNAME_LN)) - (rule simpleStatement "9" - (first "(" "@false" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF - PNAME_LN STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (follow "." 
"}") - (seq term propertylist)) - (rule _simpleStatement_1 "9.1" - (first "(" "<=" "=" "=>" "@a" "@false" "@has" "@is" "@true" BLANK_NODE_LABEL - DECIMAL DOUBLE INTEGER IRIREF PNAME_LN STRING_LITERAL_LONG_QUOTE - STRING_LITERAL_LONG_SINGLE_QUOTE STRING_LITERAL_QUOTE - STRING_LITERAL_SINGLE_QUOTE "[" _eps "{" ) - (follow "." "}") - (seq propertylist)) - (rule propertylist "10" - (first "(" "<=" "=" "=>" "@a" "@false" "@has" "@is" "@true" BLANK_NODE_LABEL - DECIMAL DOUBLE INTEGER IRIREF PNAME_LN STRING_LITERAL_LONG_QUOTE - STRING_LITERAL_LONG_SINGLE_QUOTE STRING_LITERAL_QUOTE - STRING_LITERAL_SINGLE_QUOTE "[" _eps "{" ) - (follow "." "]" "}") - (cleanup opt) - (alt _empty _propertylist_1)) - (rule _propertylist_1 "10.1" - (first "(" "<=" "=" "=>" "@a" "@false" "@has" "@is" "@true" BLANK_NODE_LABEL - DECIMAL DOUBLE INTEGER IRIREF PNAME_LN STRING_LITERAL_LONG_QUOTE - STRING_LITERAL_LONG_SINGLE_QUOTE STRING_LITERAL_QUOTE - STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (follow "." "]" "}") - (seq property _propertylist_2)) - (rule _propertylist_2 "10.2" - (first ";" _eps) - (follow "." "]" "}") - (cleanup star) - (alt _empty _propertylist_4)) - (rule _propertylist_3 "10.3" (first ";") (follow "." ";" "]" "}") (seq ";" property)) - (rule _propertylist_4 "10.4" - (first ";") - (follow "." "]" "}") - (cleanup merge) - (seq _propertylist_3 _propertylist_2)) - (rule _propertylist_5 "10.5" - (first ";" _eps) - (follow "." "]" "}") - (seq _propertylist_2)) - (rule _propertylist_6 "10.6" - (first ";" _eps) - (follow "." "]" "}") - (seq _propertylist_2)) - (rule _propertylist_7 "10.7" - (first "(" "<=" "=" "=>" "@a" "@false" "@has" "@is" "@true" BLANK_NODE_LABEL - DECIMAL DOUBLE INTEGER IRIREF PNAME_LN STRING_LITERAL_LONG_QUOTE - STRING_LITERAL_LONG_SINGLE_QUOTE STRING_LITERAL_QUOTE - STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (follow "." ";" "]" "}") - (seq property)) - (rule property "11" - (first "(" "<=" "=" "=>" "@a" "@false" "@has" "@is" "@true" BLANK_NODE_LABEL - DECIMAL DOUBLE INTEGER IRIREF PNAME_LN STRING_LITERAL_LONG_QUOTE - STRING_LITERAL_LONG_SINGLE_QUOTE STRING_LITERAL_QUOTE - STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (follow "." ";" "]" "}") - (seq _property_1 term _property_2)) - (rule _property_1 "11.1" - (first "(" "<=" "=" "=>" "@a" "@false" "@has" "@is" "@true" BLANK_NODE_LABEL - DECIMAL DOUBLE INTEGER IRIREF PNAME_LN STRING_LITERAL_LONG_QUOTE - STRING_LITERAL_LONG_SINGLE_QUOTE STRING_LITERAL_QUOTE - STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (follow "(" "@false" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF - PNAME_LN STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (alt verb inverb)) - (rule _property_2 "11.2" - (first "," _eps) - (follow "." ";" "]" "}") - (cleanup star) - (alt _empty _property_4)) - (rule _property_3 "11.3" (first ",") (follow "," "." ";" "]" "}") (seq "," term)) - (rule _property_4 "11.4" - (first ",") - (follow "." ";" "]" "}") - (cleanup merge) - (seq _property_3 _property_2)) - (rule _property_5 "11.5" - (first "(" "@false" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF - PNAME_LN STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (follow "." ";" "]" "}") - (seq term _property_2)) - (rule _property_6 "11.6" (first "," _eps) (follow "." 
";" "]" "}") (seq _property_2)) - (rule _property_7 "11.7" - (first "(" "@false" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF - PNAME_LN STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (follow "," "." ";" "]" "}") - (seq term)) - (rule verb "12" - (first "(" "<=" "=" "=>" "@a" "@false" "@has" "@true" BLANK_NODE_LABEL - DECIMAL DOUBLE INTEGER IRIREF PNAME_LN STRING_LITERAL_LONG_QUOTE - STRING_LITERAL_LONG_SINGLE_QUOTE STRING_LITERAL_QUOTE - STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (follow "(" "@false" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF - PNAME_LN STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (alt _verb_1 "@a" "=" "=>" "<=")) - (rule inverb "12a" - (first "@is") - (follow "(" "@false" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF - PNAME_LN STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (seq "@is" term "@of")) - (rule _inverb_2 "12a.2" - (first "@of") - (follow "(" "@false" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF - PNAME_LN STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (seq "@of")) - (rule _inverb_1 "12a.1" - (first "(" "@false" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF - PNAME_LN STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (follow "(" "@false" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF - PNAME_LN STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (seq term "@of")) - (rule _verb_1 "12.1" - (first "(" "@false" "@has" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER - IRIREF PNAME_LN STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (follow "(" "@false" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF - PNAME_LN STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (seq _verb_2 term)) - (rule _verb_2 "12.2" - (first "@has" _eps) - (follow "(" "@false" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF - PNAME_LN STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (cleanup opt) - (alt _empty "@has")) - (rule _verb_3 "12.3" - (first "(" "@false" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF - PNAME_LN STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (follow "(" "@false" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF - PNAME_LN STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (seq term)) - (rule literal "13" - (first "@false" "@true" DECIMAL DOUBLE INTEGER STRING_LITERAL_LONG_QUOTE - STRING_LITERAL_LONG_SINGLE_QUOTE STRING_LITERAL_QUOTE - STRING_LITERAL_SINGLE_QUOTE ) - (follow "!" "(" ")" "," "." 
";" "<=" "=" "=>" "@a" "@false" "@has" "@is" - "@of" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "]" "^" "{" "}" ) - (alt RDFLiteral NumericLiteral BooleanLiteral)) - (rule term "13" - (first "(" "@false" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF - PNAME_LN STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (follow "(" ")" "," "." ";" "<=" "=" "=>" "@a" "@false" "@has" "@is" "@of" - "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "]" "{" "}" ) - (seq pathitem _term_1)) - (rule _term_1 "13.1" - (first "!" "^" _eps) - (follow "(" ")" "," "." ";" "<=" "=" "=>" "@a" "@false" "@has" "@is" "@of" - "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "]" "{" "}" ) - (cleanup opt) - (alt _empty pathtail)) - (rule _term_2 "13.2" - (first "!" "^" _eps) - (follow "(" ")" "," "." ";" "<=" "=" "=>" "@a" "@false" "@has" "@is" "@of" - "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "]" "{" "}" ) - (seq _term_1)) - (rule pathtail "14" - (first "!" "^") - (follow "(" ")" "," "." ";" "<=" "=" "=>" "@a" "@false" "@has" "@is" "@of" - "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "]" "{" "}" ) - (seq _pathtail_1 term)) - (rule _pathtail_1 "14.1" - (first "!" "^") - (follow "(" "@false" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF - PNAME_LN STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (alt "!" "^")) - (rule _pathtail_2 "14.2" - (first "(" "@false" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF - PNAME_LN STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (follow "(" ")" "," "." ";" "<=" "=" "=>" "@a" "@false" "@has" "@is" "@of" - "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "]" "{" "}" ) - (seq term)) - (rule pathitem "15" - (first "(" "@false" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF - PNAME_LN STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (follow "!" "(" ")" "," "." ";" "<=" "=" "=>" "@a" "@false" "@has" "@is" - "@of" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "]" "^" "{" "}" ) - (alt symbol BLANK_NODE_LABEL UVAR literal _pathitem_1 _pathitem_2 _pathitem_3)) - (rule _pathitem_1 "15.1" - (first "{") - (follow "!" "(" ")" "," "." 
";" "<=" "=" "=>" "@a" "@false" "@has" "@is" - "@of" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "]" "^" "{" "}" ) - (seq "{" formulacontent "}")) - (rule _pathitem_10 "15.10" - (first "}") - (follow "!" "(" ")" "," "." ";" "<=" "=" "=>" "@a" "@false" "@has" "@is" - "@of" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "]" "^" "{" "}" ) - (seq "}")) - (rule _pathitem_11 "15.11" - (first "]") - (follow "!" "(" ")" "," "." ";" "<=" "=" "=>" "@a" "@false" "@has" "@is" - "@of" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "]" "^" "{" "}" ) - (seq "]")) - (rule _pathitem_12 "15.12" - (first ")") - (follow "!" "(" ")" "," "." ";" "<=" "=" "=>" "@a" "@false" "@has" "@is" - "@of" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "]" "^" "{" "}" ) - (seq ")")) - (rule _pathitem_2 "15.2" - (first "[") - (follow "!" "(" ")" "," "." ";" "<=" "=" "=>" "@a" "@false" "@has" "@is" - "@of" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "]" "^" "{" "}" ) - (seq "[" propertylist "]")) - (rule _pathitem_3 "15.3" - (first "(") - (follow "!" "(" ")" "," "." ";" "<=" "=" "=>" "@a" "@false" "@has" "@is" - "@of" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "]" "^" "{" "}" ) - (seq "(" _pathitem_4 ")")) - (rule _pathitem_4 "15.4" - (first "(" "@false" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF - PNAME_LN STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" _eps "{" ) - (follow ")") - (cleanup star) - (alt _empty _pathitem_5)) - (rule _pathitem_5 "15.5" - (first "(" "@false" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF - PNAME_LN STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (follow ")") - (cleanup merge) - (seq term _pathitem_4)) - (rule _pathitem_6 "15.6" - (first "(" "@false" "@forAll" "@forSome" "@keywords" "@prefix" "@true" - BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "{" "}" ) - (follow "!" "(" ")" "," "." ";" "<=" "=" "=>" "@a" "@false" "@has" "@is" - "@of" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "]" "^" "{" "}" ) - (seq formulacontent "}")) - (rule _pathitem_7 "15.7" - (first "(" "<=" "=" "=>" "@a" "@false" "@has" "@is" "@true" BLANK_NODE_LABEL - DECIMAL DOUBLE INTEGER IRIREF PNAME_LN STRING_LITERAL_LONG_QUOTE - STRING_LITERAL_LONG_SINGLE_QUOTE STRING_LITERAL_QUOTE - STRING_LITERAL_SINGLE_QUOTE "[" "]" "{" ) - (follow "!" "(" ")" "," "." 
";" "<=" "=" "=>" "@a" "@false" "@has" "@is" - "@of" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "]" "^" "{" "}" ) - (seq propertylist "]")) - (rule _pathitem_8 "15.8" - (first "(" ")" "@false" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER - IRIREF PNAME_LN STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "{" ) - (follow "!" "(" ")" "," "." ";" "<=" "=" "=>" "@a" "@false" "@has" "@is" - "@of" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "]" "^" "{" "}" ) - (seq _pathitem_4 ")")) - (rule _pathitem_9 "15.9" - (first "(" "@false" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF - PNAME_LN STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" _eps "{" ) - (follow ")") - (seq _pathitem_4)) - (rule NumericLiteral "16" - (first DECIMAL DOUBLE INTEGER) - (follow "!" "(" ")" "," "." ";" "<=" "=" "=>" "@a" "@false" "@has" "@is" - "@of" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "]" "^" "{" "}" ) - (alt INTEGER DECIMAL DOUBLE)) - (rule String "17t" - (first STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE ) - (follow "!" "(" ")" "," "." ";" "<=" "=" "=>" "@a" "@false" "@has" "@is" - "@of" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF LANGTAG - PNAME_LN STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "]" "^" "^^" "{" "}" ) - (alt STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE - STRING_LITERAL_LONG_SINGLE_QUOTE STRING_LITERAL_LONG_QUOTE )) - (rule symbol "18" - (first IRIREF PNAME_LN) - (follow "!" "(" ")" "," "." ";" "<=" "=" "=>" "@a" "@false" "@has" "@is" - "@of" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "]" "^" "{" "}" ) - (alt IRIREF PNAME_LN)) - (terminal IRIREF "18" (seq "<" (range "^#x00-#x20<>\"{}|^`] | UCHAR)* '>'"))) - (terminal INTEGER "19" (seq (opt (range "+-")) (plus (range "0-9")))) - (terminal DECIMAL "20" - (seq (opt (range "+-")) (seq (star (range "0-9")) "." (plus (range "0-9"))))) - (terminal DOUBLE "21" - (seq - (opt (range "+-")) - (alt - (seq (plus (range "0-9")) "." (star (range "0-9")) EXPONENT) - (seq "." (plus (range "0-9")) EXPONENT) - (seq (plus (range "0-9")) EXPONENT)) )) - (terminal STRING_LITERAL_QUOTE "22" - (seq "\"" (star (alt (range "^#x22#x5C#xA#xD") ECHAR UCHAR)) "\"")) - (terminal STRING_LITERAL_SINGLE_QUOTE "23" - (seq "'" (star (alt (range "^#x27#x5C#xA#xD") ECHAR UCHAR)) "'")) - (terminal STRING_LITERAL_LONG_SINGLE_QUOTE "24" - (seq "'''" (seq (opt (alt "'" "''")) (range "^'] | ECHAR | UCHAR ))* \"'''\"")))) - (terminal STRING_LITERAL_LONG_QUOTE "25" - (seq "\"\"\"" (seq (opt (alt "\"" "\"\"")) (range "^\"] | ECHAR | UCHAR ))* '\"\"\"'")))) - (terminal UCHAR "26" - (alt (seq "u" HEX HEX HEX HEX) (seq "U" HEX HEX HEX HEX HEX HEX HEX HEX))) - (terminal HEX "42" (alt (range "0-9") (range "A-F") (range "a-f"))) - (rule _RDFLiteral_5 "128s.5" - (follow "!" 
"(" ")" "," "." ";" "<=" "=" "=>" "@a" "@false" "@has" "@is" - "@of" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "]" "^" "{" "}" ) - (seq iri)) - (rule _RDFLiteral_4 "128s.4" - (first LANGTAG "^^" _eps) - (follow "!" "(" ")" "," "." ";" "<=" "=" "=>" "@a" "@false" "@has" "@is" - "@of" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "]" "^" "{" "}" ) - (seq _RDFLiteral_1)) - (rule RDFLiteral "128s" - (first STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE ) - (follow "!" "(" ")" "," "." ";" "<=" "=" "=>" "@a" "@false" "@has" "@is" - "@of" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "]" "^" "{" "}" ) - (seq String _RDFLiteral_1)) - (rule _RDFLiteral_1 "128s.1" - (first LANGTAG "^^" _eps) - (follow "!" "(" ")" "," "." ";" "<=" "=" "=>" "@a" "@false" "@has" "@is" - "@of" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "]" "^" "{" "}" ) - (cleanup opt) - (alt _empty _RDFLiteral_2)) - (rule _RDFLiteral_3 "128s.3" - (first "^^") - (follow "!" "(" ")" "," "." ";" "<=" "=" "=>" "@a" "@false" "@has" "@is" - "@of" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "]" "^" "{" "}" ) - (seq "^^" iri)) - (rule _RDFLiteral_2 "128s.2" - (first LANGTAG "^^") - (follow "!" "(" ")" "," "." ";" "<=" "=" "=>" "@a" "@false" "@has" "@is" - "@of" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "]" "^" "{" "}" ) - (alt LANGTAG _RDFLiteral_3)) - (rule BooleanLiteral "133s" - (first "@false" "@true") - (follow "!" "(" ")" "," "." 
";" "<=" "=" "=>" "@a" "@false" "@has" "@is" - "@of" "@true" BLANK_NODE_LABEL DECIMAL DOUBLE INTEGER IRIREF PNAME_LN - STRING_LITERAL_LONG_QUOTE STRING_LITERAL_LONG_SINGLE_QUOTE - STRING_LITERAL_QUOTE STRING_LITERAL_SINGLE_QUOTE "[" "]" "^" "{" "}" ) - (alt "@true" "@false")) - (terminal PNAME_NS "139s" (seq (opt PN_PREFIX) ":")) - (terminal PNAME_LN "140s" (seq PNAME_NS PN_LOCAL)) - (terminal BLANK_NODE_LABEL "141s" - (seq "_:" (alt PN_CHARS_U (range "0-9")) (opt (seq (star (alt PN_CHARS ".")) PN_CHARS)))) - (terminal LANGTAG "144s" - (seq "@" (plus (range "a-zA-Z")) (star (seq "-" (plus (range "a-zA-Z0-9")))))) - (terminal EXPONENT "154s" (seq (range "eE") (opt (range "+-")) (plus (range "0-9")))) - (terminal ECHAR "159s" (seq "\\" (range "tbnrf\"'"))) - (terminal PN_CHARS_BASE "163s" - (alt - (range "A-Z") - (range "a-z") - (range "#x00C0-#x00D6") - (range "#x00D8-#x00F6") - (range "#x00F8-#x02FF") - (range "#x0370-#x037D") - (range "#x037F-#x1FFF") - (range "#x200C-#x200D") - (range "#x2070-#x218F") - (range "#x2C00-#x2FEF") - (range "#x3001-#xD7FF") - (range "#xF900-#xFDCF") - (range "#xFDF0-#xFFFD") - (range "#x10000-#xEFFFF")) ) - (terminal PN_CHARS_U "164s" (alt PN_CHARS_BASE "_")) - (terminal PN_CHARS "166s" - (alt PN_CHARS_U "-" - (range "0-9") - (hex "#x00B7") - (range "#x0300-#x036F") - (range "#x203F-#x2040")) ) - (terminal PN_PREFIX "167s" - (seq PN_CHARS_BASE (opt (seq (star (alt PN_CHARS ".")) PN_CHARS)))) - (terminal PN_LOCAL "168s" - (seq - (alt PN_CHARS_U ":" (range "0-9") PLX) - (opt (seq (star (alt PN_CHARS "." ":" PLX)) (alt PN_CHARS ":" PLX)))) ) - (terminal PLX "169s" (alt PERCENT PN_LOCAL_ESC)) - (terminal PERCENT "170s" (seq "%" HEX HEX)) - (terminal PN_LOCAL_ESC "172s" - (seq "\\" - (alt "_" "~" "." "-" "!" "$" "&" "'" "(" ")" "*" "+" "," ";" "=" "/" "?" "#" - "@" "%" )) )) diff --git a/examples/example-1.n3 b/examples/example-1.n3 index e039c35..869cbd9 100644 --- a/examples/example-1.n3 +++ b/examples/example-1.n3 @@ -1,5 +1,5 @@ @prefix log: . -@keywords. +# @keywords. @forAll x, y, z. {x parent y. y sister z} log:implies {x aunt z}. # This N3 formula has three universally quantified variables and one statement. The subject of the statement, diff --git a/examples/example-2.n3 b/examples/example-2.n3 index fdcc04d..933d0d3 100644 --- a/examples/example-2.n3 +++ b/examples/example-2.n3 @@ -1,4 +1,4 @@ -@keywords. +# @keywords. @forAll x, y, z. { x wrote y. y log:includes {z weather w}. diff --git a/examples/gk-list-issue-12.n3 b/examples/gk-list-issue-12.n3 new file mode 100644 index 0000000..e587629 --- /dev/null +++ b/examples/gk-list-issue-12.n3 @@ -0,0 +1,11 @@ +@prefix rdf: . +@prefix list: . +@prefix : . + +{ (1 2 3) list:last ?x } => { :lastMember :is ?x } . + +{ + _:l3 rdf:first 3; rdf:rest rdf:nil . + _:l2 rdf:first 2; rdf:rest _:l3 . + _:l1 rdf:first 1; rdf:rest _:l2; list:last ?y +} => { :lastMember2 :is ?y } . diff --git a/examples/jos-list-issue-12.n3 b/examples/jos-list-issue-12.n3 new file mode 100644 index 0000000..8c7bed3 --- /dev/null +++ b/examples/jos-list-issue-12.n3 @@ -0,0 +1,11 @@ +@prefix rdf: . +@prefix list: . +@prefix : . + +_:list1 rdf:first :a; rdf:rest _:l2. +_:l2 rdf:first :b; rdf:rest _:l3. +_:l3 rdf:first :c; rdf:rest rdf:nil. +_:list1 :p :o. +(1 2 3) :p :o. + +{?LIST :p :o; rdf:first ?FIRST; list:last ?LAST} => {?LIST :first_element ?FIRST; :last_element ?LAST}. 
diff --git a/examples/pchampin-issue-56.n3 b/examples/pchampin-issue-56.n3 new file mode 100644 index 0000000..0329869 --- /dev/null +++ b/examples/pchampin-issue-56.n3 @@ -0,0 +1,3 @@ +:captain :age _:a. +(39 3) math:sum _:a. +{ :captain :age 42 } => { :test1 a :Success }. diff --git a/lib/rdf/n3.rb b/lib/rdf/n3.rb index 1f948fc..b8ccbe4 100644 --- a/lib/rdf/n3.rb +++ b/lib/rdf/n3.rb @@ -1,4 +1,3 @@ -$:.unshift(File.expand_path(File.join(File.dirname(__FILE__), '..'))) require 'rdf' module RDF @@ -24,12 +23,13 @@ module N3 require 'rdf/n3/format' require 'rdf/n3/vocab' require 'rdf/n3/extensions' - require 'rdf/n3/patches/array_hacks' - autoload :Meta, 'rdf/n3/reader/meta' - autoload :Parser, 'rdf/n3/reader/parser' - autoload :Reader, 'rdf/n3/reader' - autoload :Reasoner, 'rdf/n3/reasoner' - autoload :VERSION, 'rdf/n3/version' - autoload :Writer, 'rdf/n3/writer' + require 'rdf/n3/refinements' + autoload :List, 'rdf/n3/list' + autoload :Reader, 'rdf/n3/reader' + autoload :Reasoner, 'rdf/n3/reasoner' + autoload :Repository, 'rdf/n3/repository' + autoload :Terminals, 'rdf/n3/terminals' + autoload :VERSION, 'rdf/n3/version' + autoload :Writer, 'rdf/n3/writer' end end \ No newline at end of file diff --git a/lib/rdf/n3/algebra.rb b/lib/rdf/n3/algebra.rb index 8b4300c..17319a9 100644 --- a/lib/rdf/n3/algebra.rb +++ b/lib/rdf/n3/algebra.rb @@ -7,115 +7,194 @@ module RDF::N3 # # @author [Gregg Kellogg](http://greggkellogg.net/) module Algebra + autoload :Builtin, 'rdf/n3/algebra/builtin' autoload :Formula, 'rdf/n3/algebra/formula' + autoload :ListOperator, 'rdf/n3/algebra/list_operator' + autoload :NotImplemented, 'rdf/n3/algebra/not_implemented' + autoload :ResourceOperator, 'rdf/n3/algebra/resource_operator' module List + def vocab; RDF::N3::List.to_uri; end + module_function :vocab autoload :Append, 'rdf/n3/algebra/list/append' + autoload :First, 'rdf/n3/algebra/list/first' autoload :In, 'rdf/n3/algebra/list/in' autoload :Last, 'rdf/n3/algebra/list/last' + autoload :Length, 'rdf/n3/algebra/list/length' autoload :Member, 'rdf/n3/algebra/list/member' end module Log + def vocab; RDF::N3::Log.to_uri; end + module_function :vocab autoload :Conclusion, 'rdf/n3/algebra/log/conclusion' autoload :Conjunction, 'rdf/n3/algebra/log/conjunction' - autoload :EqualTo, 'rdf/n3/algebra/log/equalTo' + autoload :Content, 'rdf/n3/algebra/log/content' + autoload :EqualTo, 'rdf/n3/algebra/log/equal_to' autoload :Implies, 'rdf/n3/algebra/log/implies' autoload :Includes, 'rdf/n3/algebra/log/includes' - autoload :NotEqualTo, 'rdf/n3/algebra/log/notEqualTo' - autoload :NotIncludes, 'rdf/n3/algebra/log/notIncludes' - autoload :OutputString, 'rdf/n3/algebra/log/outputString' + autoload :N3String, 'rdf/n3/algebra/log/n3_string' + autoload :NotEqualTo, 'rdf/n3/algebra/log/not_equal_to' + autoload :NotIncludes, 'rdf/n3/algebra/log/not_includes' + autoload :OutputString, 'rdf/n3/algebra/log/output_string' + autoload :ParsedAsN3, 'rdf/n3/algebra/log/parsed_as_n3' + autoload :Semantics, 'rdf/n3/algebra/log/semantics' end module Math - autoload :AbsoluteValue, 'rdf/n3/algebra/math/absoluteValue' + def vocab; RDF::N3::Math.to_uri; end + module_function :vocab + autoload :AbsoluteValue, 'rdf/n3/algebra/math/absolute_value' + autoload :ACos, 'rdf/n3/algebra/math/acos' + autoload :ASin, 'rdf/n3/algebra/math/asin' + autoload :ATan, 'rdf/n3/algebra/math/atan' + autoload :ACosH, 'rdf/n3/algebra/math/acosh' + autoload :ASinH, 'rdf/n3/algebra/math/asinh' + autoload :ATanH, 'rdf/n3/algebra/math/atanh' + autoload :Ceiling, 
'rdf/n3/algebra/math/ceiling' + autoload :Cos, 'rdf/n3/algebra/math/cos' + autoload :CosH, 'rdf/n3/algebra/math/cosh' autoload :Difference, 'rdf/n3/algebra/math/difference' - autoload :EqualTo, 'rdf/n3/algebra/math/equalTo' + autoload :EqualTo, 'rdf/n3/algebra/math/equal_to' autoload :Exponentiation, 'rdf/n3/algebra/math/exponentiation' - autoload :GreaterThan, 'rdf/n3/algebra/math/greaterThan' - autoload :IntegerQuotient, 'rdf/n3/algebra/math/integerQuotient' - autoload :LessThan, 'rdf/n3/algebra/math/lessThan' - autoload :MemberCount, 'rdf/n3/algebra/math/memberCount' + autoload :Floor, 'rdf/n3/algebra/math/floor' + autoload :GreaterThan, 'rdf/n3/algebra/math/greater_than' + autoload :LessThan, 'rdf/n3/algebra/math/less_than' autoload :Negation, 'rdf/n3/algebra/math/negation' - autoload :NotEqualTo, 'rdf/n3/algebra/math/notEqualTo' - autoload :NotGreaterThan, 'rdf/n3/algebra/math/notGreaterThan' - autoload :NotLessThan, 'rdf/n3/algebra/math/notLessThan' + autoload :NotEqualTo, 'rdf/n3/algebra/math/not_equal_to' + autoload :NotGreaterThan, 'rdf/n3/algebra/math/not_greater_than' + autoload :NotLessThan, 'rdf/n3/algebra/math/not_less_than' autoload :Product, 'rdf/n3/algebra/math/product' autoload :Quotient, 'rdf/n3/algebra/math/quotient' autoload :Remainder, 'rdf/n3/algebra/math/remainder' autoload :Rounded, 'rdf/n3/algebra/math/rounded' + autoload :Sin, 'rdf/n3/algebra/math/sin' + autoload :SinH, 'rdf/n3/algebra/math/sinh' autoload :Sum, 'rdf/n3/algebra/math/sum' + autoload :Tan, 'rdf/n3/algebra/math/tan' + autoload :TanH, 'rdf/n3/algebra/math/tanh' end module Str + def vocab; RDF::N3::Str.to_uri; end + module_function :vocab autoload :Concatenation, 'rdf/n3/algebra/str/concatenation' autoload :Contains, 'rdf/n3/algebra/str/contains' - autoload :ContainsIgnoringCase, 'rdf/n3/algebra/str/containsIgnoringCase' - autoload :EndsWith, 'rdf/n3/algebra/str/endsWith' - autoload :EqualIgnoringCase, 'rdf/n3/algebra/str/equalIgnoringCase' + autoload :ContainsIgnoringCase, 'rdf/n3/algebra/str/contains_ignoring_case' + autoload :EndsWith, 'rdf/n3/algebra/str/ends_with' + autoload :EqualIgnoringCase, 'rdf/n3/algebra/str/equal_ignoring_case' autoload :Format, 'rdf/n3/algebra/str/format' - autoload :GreaterThan, 'rdf/n3/algebra/str/greaterThan' - autoload :LessThan, 'rdf/n3/algebra/str/lessThan' + autoload :GreaterThan, 'rdf/n3/algebra/str/greater_than' + autoload :LessThan, 'rdf/n3/algebra/str/less_than' autoload :Matches, 'rdf/n3/algebra/str/matches' - autoload :NotEqualIgnoringCase, 'rdf/n3/algebra/str/notEqualIgnoringCase' - autoload :NotGreaterThan, 'rdf/n3/algebra/str/notGreaterThan' - autoload :NotLessThan, 'rdf/n3/algebra/str/notLessThan' - autoload :NotMatches, 'rdf/n3/algebra/str/notMatches' + autoload :NotEqualIgnoringCase, 'rdf/n3/algebra/str/not_equal_ignoring_case' + autoload :NotGreaterThan, 'rdf/n3/algebra/str/not_greater_than' + autoload :NotLessThan, 'rdf/n3/algebra/str/not_less_than' + autoload :NotMatches, 'rdf/n3/algebra/str/not_matches' autoload :Replace, 'rdf/n3/algebra/str/replace' autoload :Scrape, 'rdf/n3/algebra/str/scrape' - autoload :StartsWith, 'rdf/n3/algebra/str/startsWith' + autoload :StartsWith, 'rdf/n3/algebra/str/starts_with' + end + + module Time + def vocab; RDF::N3::Time.to_uri; end + module_function :vocab + autoload :DayOfWeek, 'rdf/n3/algebra/time/day_of_week' + autoload :Day, 'rdf/n3/algebra/time/day' + autoload :GmTime, 'rdf/n3/algebra/time/gm_time' + autoload :Hour, 'rdf/n3/algebra/time/hour' + autoload :InSeconds, 'rdf/n3/algebra/time/in_seconds' + autoload 
:LocalTime, 'rdf/n3/algebra/time/local_time' + autoload :Minute, 'rdf/n3/algebra/time/minute' + autoload :Month, 'rdf/n3/algebra/time/month' + autoload :Second, 'rdf/n3/algebra/time/second' + autoload :Timezone, 'rdf/n3/algebra/time/timezone' + autoload :Year, 'rdf/n3/algebra/time/year' end def for(uri) { - RDF::N3::List.append => List::Append, - RDF::N3::List.in => List::In, - RDF::N3::List.last => List::Last, - RDF::N3::List.member => List::Member, + RDF::N3::List.append => List.const_get(:Append), + RDF::N3::List.first => List.const_get(:First), + RDF::N3::List.in => List.const_get(:In), + RDF::N3::List.last => List.const_get(:Last), + RDF::N3::List.length => List.const_get(:Length), + RDF::N3::List.member => List.const_get(:Member), + + RDF::N3::Log.conclusion => Log.const_get(:Conclusion), + RDF::N3::Log.conjunction => Log.const_get(:Conjunction), + RDF::N3::Log.content => Log.const_get(:Content), + RDF::N3::Log.equalTo => Log.const_get(:EqualTo), + RDF::N3::Log.implies => Log.const_get(:Implies), + RDF::N3::Log.includes => Log.const_get(:Includes), + RDF::N3::Log.n3String => Log.const_get(:N3String), + RDF::N3::Log.notEqualTo => Log.const_get(:NotEqualTo), + RDF::N3::Log.notIncludes => Log.const_get(:NotIncludes), + RDF::N3::Log.outputString => Log.const_get(:OutputString), + RDF::N3::Log.parsedAsN3 => Log.const_get(:ParsedAsN3), + RDF::N3::Log.semantics => Log.const_get(:Semantics), + RDF::N3::Log.supports => NotImplemented, - RDF::N3::Log.conclusion => Log::Conclusion, - RDF::N3::Log.conjunction => Log::Conjunction, - RDF::N3::Log.equalTo => Log::EqualTo, - RDF::N3::Log.implies => Log::Implies, - RDF::N3::Log.includes => Log::Includes, - RDF::N3::Log.notEqualTo => Log::NotEqualTo, - RDF::N3::Log.notIncludes => Log::NotIncludes, - RDF::N3::Log.outputString => Log::OutputString, + RDF::N3::Math.absoluteValue => Math.const_get(:AbsoluteValue), + RDF::N3::Math.acos => Math.const_get(:ACos), + RDF::N3::Math.asin => Math.const_get(:ASin), + RDF::N3::Math.atan => Math.const_get(:ATan), + RDF::N3::Math.acosh => Math.const_get(:ACosH), + RDF::N3::Math.asinh => Math.const_get(:ASinH), + RDF::N3::Math.atanh => Math.const_get(:ATanH), + RDF::N3::Math.ceiling => Math.const_get(:Ceiling), + RDF::N3::Math.ceiling => Math.const_get(:Ceiling), + RDF::N3::Math.cos => Math.const_get(:Cos), + RDF::N3::Math.cosh => Math.const_get(:CosH), + RDF::N3::Math.difference => Math.const_get(:Difference), + RDF::N3::Math.equalTo => Math.const_get(:EqualTo), + RDF::N3::Math.exponentiation => Math.const_get(:Exponentiation), + RDF::N3::Math.floor => Math.const_get(:Floor), + RDF::N3::Math.greaterThan => Math.const_get(:GreaterThan), + RDF::N3::Math.lessThan => Math.const_get(:LessThan), + RDF::N3::Math.negation => Math.const_get(:Negation), + RDF::N3::Math.notEqualTo => Math.const_get(:NotEqualTo), + RDF::N3::Math.notGreaterThan => Math.const_get(:NotGreaterThan), + RDF::N3::Math.notLessThan => Math.const_get(:NotLessThan), + RDF::N3::Math.product => Math.const_get(:Product), + RDF::N3::Math.quotient => Math.const_get(:Quotient), + RDF::N3::Math.remainder => Math.const_get(:Remainder), + RDF::N3::Math.rounded => Math.const_get(:Rounded), + RDF::N3::Math.sin => Math.const_get(:Sin), + RDF::N3::Math.sinh => Math.const_get(:SinH), + RDF::N3::Math.tan => Math.const_get(:Tan), + RDF::N3::Math.tanh => Math.const_get(:TanH), + RDF::N3::Math[:sum] => Math.const_get(:Sum), - RDF::N3::Math.absoluteValue => Math::AbsoluteValue, - RDF::N3::Math.difference => Math::Difference, - RDF::N3::Math.equalTo => Math::EqualTo, - 
RDF::N3::Math.exponentiation => Math::Exponentiation, - RDF::N3::Math.greaterThan => Math::GreaterThan, - RDF::N3::Math.integerQuotient => Math::IntegerQuotient, - RDF::N3::Math.lessThan => Math::LessThan, - RDF::N3::Math.memberCount => Math::MemberCount, - RDF::N3::Math.negation => Math::Negation, - RDF::N3::Math.notEqualTo => Math::NotEqualTo, - RDF::N3::Math.notGreaterThan => Math::NotGreaterThan, - RDF::N3::Math.notLessThan => Math::NotLessThan, - RDF::N3::Math.product => Math::Product, - RDF::N3::Math.quotient => Math::Quotient, - RDF::N3::Math.remainder => Math::Remainder, - RDF::N3::Math.rounded => Math::Rounded, - RDF::N3::Math.sum => Math::Sum, + RDF::N3::Str.concatenation => Str.const_get(:Concatenation), + RDF::N3::Str.contains => Str.const_get(:Contains), + RDF::N3::Str.containsIgnoringCase => Str.const_get(:ContainsIgnoringCase), + RDF::N3::Str.containsRoughly => NotImplemented, + RDF::N3::Str.endsWith => Str.const_get(:EndsWith), + RDF::N3::Str.equalIgnoringCase => Str.const_get(:EqualIgnoringCase), + RDF::N3::Str.format => Str.const_get(:Format), + RDF::N3::Str.greaterThan => Str.const_get(:GreaterThan), + RDF::N3::Str.lessThan => Str.const_get(:LessThan), + RDF::N3::Str.matches => Str.const_get(:Matches), + RDF::N3::Str.notEqualIgnoringCase => Str.const_get(:NotEqualIgnoringCase), + RDF::N3::Str.notGreaterThan => Str.const_get(:NotGreaterThan), + RDF::N3::Str.notLessThan => Str.const_get(:NotLessThan), + RDF::N3::Str.notMatches => Str.const_get(:NotMatches), + RDF::N3::Str.replace => Str.const_get(:Replace), + RDF::N3::Str.scrape => Str.const_get(:Scrape), + RDF::N3::Str.startsWith => Str.const_get(:StartsWith), - RDF::N3::Str.concatenation => Str::Concatenation, - RDF::N3::Str.contains => Str::Contains, - RDF::N3::Str.containsIgnoringCase => Str::ContainsIgnoringCase, - RDF::N3::Str.endsWith => Str::EndsWith, - RDF::N3::Str.equalIgnoringCase => Str::EqualIgnoringCase, - RDF::N3::Str.format => Str::Format, - RDF::N3::Str.greaterThan => Str::GreaterThan, - RDF::N3::Str.lessThan => Str::LessThan, - RDF::N3::Str.matches => Str::Matches, - RDF::N3::Str.notEqualIgnoringCase => Str::NotEqualIgnoringCase, - RDF::N3::Str.notGreaterThan => Str::NotGreaterThan, - RDF::N3::Str.notLessThan => Str::NotLessThan, - RDF::N3::Str.notMatches => Str::NotMatches, - RDF::N3::Str.replace => Str::Replace, - RDF::N3::Str.scrape => Str::Scrape, - RDF::N3::Str.startsWith => Str::StartsWith, + RDF::N3::Time.dayOfWeek => Time.const_get(:DayOfWeek), + RDF::N3::Time.day => Time.const_get(:Day), + RDF::N3::Time.gmTime => Time.const_get(:GmTime), + RDF::N3::Time.hour => Time.const_get(:Hour), + RDF::N3::Time.inSeconds => Time.const_get(:InSeconds), + RDF::N3::Time.localTime => Time.const_get(:LocalTime), + RDF::N3::Time.minute => Time.const_get(:Minute), + RDF::N3::Time.month => Time.const_get(:Month), + RDF::N3::Time.second => Time.const_get(:Second), + RDF::N3::Time.timeZone => Time.const_get(:Timezone), + RDF::N3::Time.year => Time.const_get(:Year), }[uri] end module_function :for diff --git a/lib/rdf/n3/algebra/builtin.rb b/lib/rdf/n3/algebra/builtin.rb new file mode 100644 index 0000000..4e1e743 --- /dev/null +++ b/lib/rdf/n3/algebra/builtin.rb @@ -0,0 +1,79 @@ +require 'rdf/n3' + +module RDF::N3::Algebra + ## + # Behavior for N3 builtin operators + module Builtin + include RDF::Enumerable + include RDF::Util::Logger + + ## + # Determine ordering for running built-in operator considering if subject or object is varaible and considered an input or an output. 
Accepts a solution set to determine if variable inputs are bound. + # + # @param [RDF::Query::Solutions] solutions + # @return [Integer] rake for ordering, lower numbers have fewer unbound output variables. + def rank(solutions) + vars = input_operand.vars - solutions.variable_names + # The rank is the remaining unbound variables + vars.count + end + + ## + # Return subject or object operand, or both, depending on which is considered an input. + # + # @return [RDF::Term] + def input_operand + # By default, return the merger of input and output operands + RDF::N3::List.new(values: operands) + end + + ## + # Evaluates the builtin using the given variable `bindings` by cloning the builtin replacing variables with their bindings recursively. + # + # @param [Hash{Symbol => RDF::Term}] bindings + # a query solution containing zero or more variable bindings + # @param [Hash{Symbol => Object}] options ({}) + # options passed from query + # @return [RDF::N3::Algebra::Builtin] + # Returns a new builtin with bound values. + # @see SPARQL::Algebra::Expression.evaluate + def evaluate(bindings, formulae:, **options) + args = operands.map { |operand| operand.evaluate(bindings, formulae: formulae, **options) } + # Replace operands with bound operands + self.class.new(*args, formulae: formulae, **options) + end + + ## + # By default, operators yield themselves and the operands, recursively. + # + # Pass in solutions to have quantifiers resolved to those solutions. + def each(solutions: RDF::Query::Solutions(), &block) + log_debug("(#{self.class.const_get(:NAME)} each)") + log_depth do + subject, object = operands.map {|op| op.formula? ? op.graph_name : op} + block.call(RDF::Statement(subject, self.to_uri, object)) + operands.each do |op| + next unless op.is_a?(Builtin) + op.each(solutions: solutions) do |st| + # Maintain formula graph name for formula operands + st.graph_name ||= op.graph_name if op.formula? + block.call(st) + end + end + end + end + + ## + # The builtin hash is the hash of it's operands and NAME. + # + # @see RDF::Value#hash + def hash + ([self.class.const_get(:NAME)] + operands).hash + end + + # The URI of this operator. + def to_uri + self.class.const_get(:URI) + end + end +end diff --git a/lib/rdf/n3/algebra/formula.rb b/lib/rdf/n3/algebra/formula.rb index 03b44f1..3ada833 100644 --- a/lib/rdf/n3/algebra/formula.rb +++ b/lib/rdf/n3/algebra/formula.rb @@ -1,17 +1,110 @@ -require 'rdf' +require 'rdf/n3' module RDF::N3::Algebra # # A Notation3 Formula combines a graph with a BGP query. class Formula < SPARQL::Algebra::Operator + include RDF::Term + include RDF::Enumerable include SPARQL::Algebra::Query include SPARQL::Algebra::Update - include RDF::Enumerable - include RDF::Util::Logger + include RDF::N3::Algebra::Builtin + ## + # Query to run against a queryable to determine if the formula matches the queryable. 
+ # + # @return [RDF::Query] attr_accessor :query - NAME = [:formula] + NAME = :formula + + ## + # Create a formula from an RDF::Enumerable (such as RDF::N3::Repository) + # + # @param [RDF::Enumerable] enumerable + # @param [Hash{Symbol => Object}] options + # any additional keyword options + # @return [RDF::N3::Algebra::Formula] + def self.from_enumerable(enumerable, **options) + # SPARQL used for SSE and algebra functionality + require 'sparql' unless defined?(:SPARQL) + + # Create formulae from statement graph_names + formulae = {} + enumerable.graph_names.unshift(nil).each do |graph_name| + formulae[graph_name] = Formula.new(graph_name: graph_name, formulae: formulae, **options) + end + + # Add patterns to appropiate formula based on graph_name, + # and replace subject and object bnodes which identify + # named graphs with those formula + enumerable.each_statement do |statement| + # A graph name indicates a formula. + graph_name = statement.graph_name + form = formulae[graph_name] + + # Map statement components to formulae, if necessary. + statement = RDF::Statement.from(statement.to_a.map do |term| + case term + when RDF::Node + term = if formulae[term] + # Transform blank nodes denoting formulae into those formulae + formulae[term] + elsif graph_name + # If we're in a quoted graph, transform blank nodes into undistinguished existential variables. + term.to_ndvar(graph_name) + else + term + end + when RDF::N3::List + # Transform blank nodes denoting formulae into those formulae + term = term.transform {|t| t.node? ? formulae.fetch(t, t) : t} + + # If we're in a quoted graph, transform blank node components into existential variables + if graph_name && term.has_nodes? + term = term.to_ndvar(graph_name) + end + end + term + end) + + pattern = statement.variable? ? RDF::Query::Pattern.from(statement) : statement + + # Formulae may be the subject or object of a known operator + if klass = RDF::N3::Algebra.for(pattern.predicate) + form.operands << klass.new(pattern.subject, + pattern.object, + formulae: formulae, + parent: form, + predicate: pattern.predicate, + **options) + else + pattern.graph_name = nil + form.operands << pattern + end + end + + # Formula is that without a graph name + this = formulae[nil] + + # If assigned a graph name, add it here + this.graph_name = options[:graph_name] if options[:graph_name] + this + end + + ## + # Duplicate this formula, recursively, renaming graph names using hash function. + # + # @return [RDF::N3::Algebra::Formula] + def deep_dup + #new_ops = operands.map(&:dup) + new_ops = operands.map do |op| + op.deep_dup + end + graph_name = RDF::Node.intern(new_ops.hash) + log_debug("formula") {"dup: #{self.graph_name} to #{graph_name}"} + self.class.new(*new_ops, **@options.merge(graph_name: graph_name, formulae: formulae)) + end ## # Yields solutions from patterns and other operands. Solutions are created by evaluating each pattern and other sub-operand against `queryable`. 
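
The new Formula.from_enumerable above is the entry point that turns a parsed N3 dataset into an algebra tree: each graph name becomes a sub-formula, blank nodes naming quoted graphs are replaced by those formulae, and remaining blank nodes inside quoted graphs become undistinguished existential variables. A minimal sketch of driving it, assuming the RDF::N3::Repository introduced elsewhere in this changeset and default options (illustration only, not an excerpt from the gem's documentation):

    require 'rdf/n3'

    n3 = %(
      @prefix : <http://example.org/> .
      { :sky :color :blue } => { :sky a :BlueSkyFact } .
    )

    # Collect the parsed quads, then build the top-level formula;
    # sub-formulae are created for each quoted-graph name.
    repository = RDF::N3::Repository.new
    repository << RDF::N3::Reader.new(n3)
    formula = RDF::N3::Algebra::Formula.from_enumerable(repository)
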
@@ -20,40 +113,117 @@ class Formula < SPARQL::Algebra::Operator # # @param [RDF::Queryable] queryable # the graph or repository to query + # @param [RDF::Query::Solutions] solutions + # initial solutions for chained queries (RDF::Query::Solutions(RDF::Query::Solution.new)) # @param [Hash{Symbol => Object}] options # any additional keyword options - # @option options [RDF::Query::Solutions] solutions - # optional initial solutions for chained queries # @return [RDF::Solutions] distinct solutions def execute(queryable, solutions: RDF::Query::Solutions(RDF::Query::Solution.new), **options) - log_debug {"formula #{graph_name} #{operands.to_sxp}"} + log_info("formula #{graph_name}") {SXP::Generator.string operands.to_sxp_bin} + log_debug("(formula bindings)") { SXP::Generator.string solutions.to_sxp_bin} - # If we were passed solutions in options, extract bindings to use for query - bindings = solutions.bindings - log_debug {"(formula bindings) #{bindings.map {|k,v| RDF::Query::Variable.new(k,v)}.to_sxp}"} - - # Only query as patterns if this is an embedded formula @query ||= RDF::Query.new(patterns).optimize! - @solutions = @query.patterns.empty? ? solutions : queryable.query(@query, solutions: solutions, bindings: bindings, **options) + log_info("(formula query)") { SXP::Generator.string(@query.to_sxp_bin)} + + solutions = if @query.empty? + solutions + else + these_solutions = queryable.query(@query, solutions: solutions, **options) + if these_solutions.empty? + # Pattern doesn't match, so there can be no solutions + log_debug("(formula query solutions)") { SXP::Generator.string([].to_sxp_bin)} + RDF::Query::Solutions.new + else + these_solutions.map! do |solution| + RDF::Query::Solution.new(solution.to_h.inject({}) do |memo, (name, value)| + # Replace blank node bindings with lists and formula references with formula, where those blank nodes are associated with lists. + value = formulae.fetch(value, value) if value.node? + l = RDF::N3::List.try_list(value, queryable) + value = l if l.constant? + memo.merge(name => value) + end) + end + log_debug("(formula query solutions)") { SXP::Generator.string(these_solutions.to_sxp_bin)} + solutions.merge(these_solutions) + end + end + + return solutions if solutions.empty? - # Merge solution sets # Reject solutions which include variables as values - @solutions = @solutions - .merge(options[:solutions]) - .filter {|s| s.enum_value.none?(&:variable?)} + solutions.filter! {|s| s.enum_value.none?(&:variable?)} # Use our solutions for sub-ops # Join solutions from other operands + # + # * Order operands by those having inputs which are constant or bound. + # * Run built-ins with indeterminant inputs (two-way) until any produces non-empty solutions, and then run remaining built-ins until exhasted or finished. + # * Re-calculate inputs with bound inputs after each built-in is run. log_depth do - sub_ops.each do |op| - @solutions = op.execute(queryable, solutions: @solutions) + # Iterate over sub_ops using evaluation heuristic + ops = sub_ops.sort_by {|op| op.rank(solutions)} + while !ops.empty? + last_op = nil + ops.each do |op| + log_debug("(formula built-in)") {SXP::Generator.string op.to_sxp_bin} + these_solutions = op.execute(queryable, solutions: solutions) + # If there are no solutions, try the next one, until we either run out of operations, or we have solutions + next if these_solutions.empty? + last_op = op + solutions = RDF::Query::Solutions(these_solutions) + break + end + + # If there is no last_op, there are no solutions. 
+ unless last_op + solutions = RDF::Query::Solutions.new + break + end + + # Remove op from list, and re-order remaining ops. + ops = (ops - [last_op]).sort_by {|op| op.rank(solutions)} end end - log_debug {"(formula solutions) #{@solutions.to_sxp}"} + log_info("(formula sub-op solutions)") {SXP::Generator.string solutions.to_sxp_bin} + solutions + end + + ## + # Evaluates the formula using the given variable `bindings` by cloning the formula replacing variables with their bindings recursively. + # + # @param [Hash{Symbol => RDF::Term}] bindings + # a query solution containing zero or more variable bindings + # @param [Hash{Symbol => Object}] options ({}) + # options passed from query + # @return [RDF::N3::List] + # @see SPARQL::Algebra::Expression.evaluate + def evaluate(bindings, formulae:, **options) + return self if bindings.empty? + this = dup + # Maintain formula relationships + formulae {|k, v| this.formulae[k] ||= v} + + # Replace operands with bound operands + this.operands = operands.map do |op| + op.evaluate(bindings, formulae: formulae, **options) + end + this + end + + ## + # Returns `true` if `self` is a {RDF::N3::Algebra::Formula}. + # + # @return [Boolean] + def formula? + true + end - # Only return solutions with distinguished variables - variable_names = @solutions.variable_names.reject {|v| v.to_s.start_with?('$$')} - variable_names.empty? ? @solutions : @solutions.dup.project(*variable_names) + ## + # The formula hash is the hash of it's operands and graph_name. + # + # @see RDF::Value#hash + def hash + ([graph_name] + operands).hash end ## @@ -61,64 +231,88 @@ def execute(queryable, solutions: RDF::Query::Solutions(RDF::Query::Solution.new # # @yield [statement] # each matching statement - # @yieldparam [RDF::Statement] solution + # @yieldparam [RDF::Statement] statement # @yieldreturn [void] ignored - def each(&block) - @solutions ||= begin - # If there are no solutions, create a single solution - RDF::Query::Solutions(RDF::Query::Solution.new) - end - log_debug {"formula #{graph_name} each #{@solutions.to_sxp}"} + def each(solutions: RDF::Query::Solutions(RDF::Query::Solution.new), &block) + log_debug("(formula each)") {SXP::Generator.string([self, solutions].to_sxp_bin)} - # Yield constant statements/patterns - constants.each do |pattern| - log_debug {"(formula constant) #{pattern.to_sxp}"} - block.call(RDF::Statement.from(pattern, graph_name: graph_name)) - end - - # Yield patterns by binding variables - # FIXME: do we need to do something with non-bound non-distinguished extistential variables? - @solutions.each do |solution| + # Yield statements by binding variables + solutions.each do |solution| # Bind blank nodes to the solution when it doesn't contain a solution for an existential variable existential_vars.each do |var| solution[var.name] ||= RDF::Node.intern(var.name.to_s.sub(/^\$+/, '')) end - log_debug {"(formula apply) #{solution.to_sxp} to BGP"} + log_debug("(formula apply)") {solution.to_sxp} # Yield each variable statement which is constant after applying solution - patterns.each do |pattern| - terms = {} - [:subject, :predicate, :object].each do |r| - terms[r] = case o = pattern.send(r) - when RDF::Query::Variable then solution[o] - else o + log_depth do + n3statements.each do |statement| + terms = {} + [:subject, :predicate, :object].each do |part| + terms[part] = case o = statement.send(part) + when RDF::Query::Variable + if solution[o] && solution[o].formula? 
+ log_info("(formula from var form)") {solution[o].graph_name.to_sxp} + form_statements(solution[o], solution: solution, &block) + else + solution[o] || o + end + when RDF::N3::List + o.variable? ? o.evaluate(solution.bindings, formulae: formulae) : o + when RDF::N3::Algebra::Formula + # uses the graph_name of the formula, and yields statements from the formula. No solutions are passed in. + log_info("(formula from form)") {o.graph_name.to_sxp} + form_statements(o, solution: solution, &block) + else + o + end end - end - statement = RDF::Statement.from(terms) + statement = RDF::Statement.from(terms) + log_debug("(formula add)") {statement.to_sxp} - # Sanity checking on statement - if statement.variable? || - statement.predicate.literal? || - statement.subject.is_a?(SPARQL::Algebra::Operator) || - statement.object.is_a?(SPARQL::Algebra::Operator) - log_debug {"(formula skip) #{statement.to_sxp}"} - next + block.call(statement) end - log_debug {"(formula add) #{statement.to_sxp}"} - block.call(statement) + # statements from sub-operands + sub_ops.each do |op| + log_debug("(formula sub_op)") {SXP::Generator.string [op, solution].to_sxp_bin} + op.each(solutions: RDF::Query::Solutions(solution)) do |stmt| + log_debug("(formula add from sub_op)") {stmt.to_sxp} + block.call(stmt) + # Add statements for any term which is a formula + stmt.to_a.select(&:node?).map {|n| formulae[n]}.compact.each do |ef| + log_debug("(formula from form)") {ef.graph_name.to_sxp} + form_statements(ef, solution: solution, &block) + end + end + end end end - - # statements from sub-operands - log_depth {sub_ops.each {|op| op.each(&block)}} end - # Set solutions - # @param [RDF::Query::Solutions] solutions - def solutions=(solutions) - @solutions = solutions + ## + # Yields each pattern which is not a builtin + # + # @yield [pattern] + # each matching pattern + # @yieldparam [RDF::Query::Pattern] pattern + # @yieldreturn [void] ignored + def each_pattern(&block) + n3statements.each do |statement| + terms = {} + [:subject, :predicate, :object].each do |part| + terms[part] = case o = statement.send(part) + when RDF::N3::Algebra::Formula + form_statements(o, solution: RDF::Query::Solution.new(), &block) + else + o + end + end + + pattern = RDF::Query::Pattern.from(terms) + block.call(pattern) + end end # Graph name associated with this formula @@ -126,60 +320,127 @@ def solutions=(solutions) def graph_name; @options[:graph_name]; end ## - # Statements memoizer - def statements - # BNodes in statements are non-distinguished existential variables - @statements ||= operands. - select {|op| op.is_a?(RDF::Statement)}. - map do |pattern| - - # Map nodes to non-distinguished existential variables (except when in top-level formula) - if graph_name - terms = {} - [:subject, :predicate, :object].each do |r| - terms[r] = case o = pattern.send(r) - when RDF::Node then RDF::Query::Variable.new(o.id, existential: true, distinguished: false) - else o - end - end + # The URI of a formula is its graph name + # @return [RDF::URI] + alias_method :to_uri, :graph_name - RDF::Query::Pattern.from(terms) - else - RDF::Query::Pattern.from(pattern) - end - end + # Assign a graph name to this formula + # @param [RDF::Resource] name + # @return [RDF::Resource] + def graph_name=(name) + formulae[name] = self + @options[:graph_name] = name end ## - # Constants memoizer - def constants - # BNodes in statements are existential variables - @constants ||= statements.select(&:constant?) + # Statements memoizer, from the operands which are statements. 
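# A standalone sketch of the built-in scheduling heuristic used in Formula#execute
# above: operators are sorted by rank, run until one yields solutions, removed from
# the set, and the remainder re-ranked. The Op struct and run_builtins helper here
# are hypothetical, shown only to illustrate the loop shape; they are not part of
# this gem's API.
Op = Struct.new(:name, :rank_proc, :exec_proc) do
  def rank(solutions); rank_proc.call(solutions); end
  def execute(solutions); exec_proc.call(solutions); end
end

def run_builtins(ops, solutions)
  ops = ops.sort_by {|op| op.rank(solutions)}
  until ops.empty?
    winner = ops.find do |op|
      result = op.execute(solutions)
      solutions = result unless result.empty?
      !result.empty?
    end
    return [] unless winner                                  # no built-in produced solutions
    ops = (ops - [winner]).sort_by {|op| op.rank(solutions)} # re-rank with the new bindings
  end
  solutions
end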
+ # + # Statements may include embedded formulae. + def n3statements + # BNodes in statements are existential variables. + @n3statements ||= begin + # Operations/Builtins are not statements. + operands. + select {|op| op.is_a?(RDF::Statement)} + end end ## - # Patterns memoizer + # Patterns memoizer, from the operands which are statements and not builtins. + # + # Expands statements containing formulae into their statements. def patterns - # BNodes in statements are existential variables - @patterns ||= statements.reject(&:constant?) + # BNodes in statements are existential variables. + @patterns ||= enum_for(:each_pattern).to_a end ## # Non-statement operands memoizer def sub_ops # operands that aren't statements, ordered by their graph_name - @sub_ops ||= operands.reject {|op| op.is_a?(RDF::Statement)} + @sub_ops ||= operands.reject {|op| op.is_a?(RDF::Statement)}.map do |op| + # Substitute nodes for existential variables in operator operands + op.operands.map! do |o| + case o + when RDF::N3::List + # Substitute blank node members with existential variables, recusively. + graph_name && o.has_nodes? ? o.to_ndvar(graph_name) : o + when RDF::Node + graph_name ? o.to_ndvar(graph_name) : o + else + o + end + end + op + end + end + + ## + # Return the variables contained within this formula + # @return [Array] + def vars + operands.vars.flatten.compact + end + + ## + # Universal vars in this formula and sub-formulae + # @return [Array", self.class.name, self.graph_name, self.operands.count) + end + + private + # Get statements from a sub-form + # @return [RDF::Resource] graph name of form + def form_statements(form, solution:, &block) + # uses the graph_name of the formula, and yields statements from the formula + log_depth do + form.each(solutions: RDF::Query::Solutions(solution)) do |stmt| + stmt.graph_name ||= form.graph_name + log_debug("(form statements add)") {stmt.to_sxp} + block.call(stmt) + end + end + + form.graph_name + end end end diff --git a/lib/rdf/n3/algebra/list/append.rb b/lib/rdf/n3/algebra/list/append.rb index d9be709..1d4ef2b 100644 --- a/lib/rdf/n3/algebra/list/append.rb +++ b/lib/rdf/n3/algebra/list/append.rb @@ -1,13 +1,42 @@ module RDF::N3::Algebra::List ## - # Iff the subject is a list of lists and the concatenation of all those lists is the object, then this is true. + # Iff the subject is a list of lists and the concatenation of all those lists is the object, then this is true. The object can be calculated as a function of the subject. + # # @example # ( (1 2) (3 4) ) list:append (1 2 3 4). # # The object can be calculated as a function of the subject. - class Append < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - + class Append < RDF::N3::Algebra::ListOperator NAME = :listAppend + URI = RDF::N3::List.append + + ## + # Resolves this operator using the given variable `bindings`. + # If the last operand is a variable, it creates a solution for each element in the list. + # + # @param [RDF::N3::List] list + # @return [RDF::Term] + # @see RDF::N3::ListOperator#evaluate + def resolve(list) + flattened = list.to_a.map(&:to_a).flatten + # Bind a new list based on the values, whos subject use made up from original list subjects + subj = RDF::Node.intern(list.map(&:subject).hash) + RDF::N3::List.new(subject: subj, values: flattened) + end + + ## + # The list argument must be a pair of literals. + # + # @param [RDF::N3::List] list + # @return [Boolean] + # @see RDF::N3::ListOperator#validate + def validate(list) + if super && list.to_a.all? 
{|li| li.list?} + true + else + log_error(NAME) {"operand is not a list of lists: #{list.to_sxp}"} + false + end + end end end diff --git a/lib/rdf/n3/algebra/list/first.rb b/lib/rdf/n3/algebra/list/first.rb new file mode 100644 index 0000000..976dbab --- /dev/null +++ b/lib/rdf/n3/algebra/list/first.rb @@ -0,0 +1,24 @@ +module RDF::N3::Algebra::List + ## + # Iff the subject is a list and the object is the first thing that list, then this is true. The object can be calculated as a function of the list. + # + # @example + # { ( 1 2 3 4 5 6 ) list:first 1 } => { :test1 a :SUCCESS }. + # + # The object can be calculated as a function of the list. + class First < RDF::N3::Algebra::ListOperator + NAME = :listFirst + URI = RDF::N3::List.first + + ## + # Resolves this operator using the given variable `bindings`. + # If the last operand is a variable, it creates a solution for each element in the list. + # + # @param [RDF::N3::List] list + # @return [RDF::Term] + # @see RDF::N3::ListOperator#evaluate + def resolve(list) + list.first + end + end +end diff --git a/lib/rdf/n3/algebra/list/in.rb b/lib/rdf/n3/algebra/list/in.rb index 10aed00..4a08102 100644 --- a/lib/rdf/n3/algebra/list/in.rb +++ b/lib/rdf/n3/algebra/list/in.rb @@ -1,9 +1,48 @@ module RDF::N3::Algebra::List ## # Iff the object is a list and the subject is in that list, then this is true. - class In < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - + # + # @example + # { 1 list:in ( 1 2 3 4 5 ) } => { :test4a a :SUCCESS }. + class In < RDF::N3::Algebra::ListOperator NAME = :listIn + URI = RDF::N3::List.in + + ## + # Evaluates this operator using the given variable `bindings`. + # If the first operand is a variable, it creates a solution for each element in the list. + # + # @param [RDF::Queryable] queryable + # the graph or repository to query + # @param [RDF::Query::Solutions] solutions + # solutions for chained queries + # @return [RDF::Query::Solutions] + def execute(queryable, solutions:, **options) + RDF::Query::Solutions(solutions.map do |solution| + subject = operand(0).evaluate(solution.bindings, formulae: formulae) || operand(0) + # Might be a variable or node evaluating to a list in queryable, or might be a list with variables + list = operand(1).evaluate(solution.bindings, formulae: formulae) + next unless list + # If it evaluated to a BNode, re-expand as a list + list = RDF::N3::List.try_list(list, queryable).evaluate(solution.bindings, formulae: formulae) + + log_debug(NAME) {"subject: #{subject.to_sxp}, list: #{list.to_sxp}"} + unless list.list? && list.valid? + log_error(NAME) {"operand is not a list: #{list.to_sxp}"} + next + end + + if subject.variable? + # Bind all list entries to this solution, creates an array of solutions + list.to_a.map do |term| + solution.merge(subject.to_sym => term) + end + elsif list.to_a.include?(subject) + solution + else + nil + end + end.flatten.compact.uniq) + end end end diff --git a/lib/rdf/n3/algebra/list/last.rb b/lib/rdf/n3/algebra/list/last.rb index 301f655..b5017e5 100644 --- a/lib/rdf/n3/algebra/list/last.rb +++ b/lib/rdf/n3/algebra/list/last.rb @@ -1,11 +1,24 @@ module RDF::N3::Algebra::List ## - # Iff the suject is a list and the obbject is the last thing that list, then this is true. + # Iff the subject is a list and the object is the last thing that list, then this is true. The object can be calculated as a function of the list. + # + # @example + # { ( 1 2 3 4 5 6 ) list:last 6 } => { :test1 a :SUCCESS }. 
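# Two plain-Ruby illustrations of the list built-ins above (example values only):
# list:append resolves by flattening the member lists,
[[1, 2], [3, 4]].map(&:to_a).flatten   #=> [1, 2, 3, 4]
# and list:in with a variable subject yields one merged solution per member.
[1, 2, 3].map {|term| {y: :bound}.merge(x: term)}
#=> [{:y=>:bound, :x=>1}, {:y=>:bound, :x=>2}, {:y=>:bound, :x=>3}]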
# # The object can be calculated as a function of the list. - class Last < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - + class Last < RDF::N3::Algebra::ListOperator NAME = :listLast + URI = RDF::N3::List.last + + ## + # Resolves this operator using the given variable `bindings`. + # If the last operand is a variable, it creates a solution for each element in the list. + # + # @param [RDF::N3::List] list + # @return [RDF::Term] + # @see RDF::N3::ListOperator#evaluate + def resolve(list) + list.last + end end end diff --git a/lib/rdf/n3/algebra/list/length.rb b/lib/rdf/n3/algebra/list/length.rb new file mode 100644 index 0000000..a6ecbf7 --- /dev/null +++ b/lib/rdf/n3/algebra/list/length.rb @@ -0,0 +1,24 @@ +module RDF::N3::Algebra::List + ## + # Iff the subject is a list and the object is the last thing that list, then this is true. The object can be calculated as a function of the list. + # + # @example + # { ( 1 2 3 4 5 6 ) list:length 6 } => { :test1 a :SUCCESS }. + # + # The object can be calculated as a function of the list. + class Length < RDF::N3::Algebra::ListOperator + NAME = :listLength + URI = RDF::N3::List.length + + ## + # Resolves this operator using the given variable `bindings`. + # If the last operand is a variable, it creates a solution for each element in the list. + # + # @param [RDF::N3::List] list + # @return [RDF::Term] + # @see RDF::N3::ListOperator#evaluate + def resolve(list) + RDF::Literal(list.length) + end + end +end diff --git a/lib/rdf/n3/algebra/list/member.rb b/lib/rdf/n3/algebra/list/member.rb index 9633743..acb8200 100644 --- a/lib/rdf/n3/algebra/list/member.rb +++ b/lib/rdf/n3/algebra/list/member.rb @@ -1,7 +1,44 @@ module RDF::N3::Algebra::List ## - # Iff the subject is a list and the obbject is in that list, then this is true. - class Member < SPARQL::Algebra::Operator::Binary + # Iff the subject is a list and the object is in that list, then this is true. + class Member < RDF::N3::Algebra::ListOperator NAME = :listMember + URI = RDF::N3::List.member + + ## + # Evaluates this operator using the given variable `bindings`. + # If the last operand is a variable, it creates a solution for each element in the list. + # + # @param [RDF::Queryable] queryable + # the graph or repository to query + # @param [RDF::Query::Solutions] solutions + # solutions for chained queries + # @return [RDF::Query::Solutions] + def execute(queryable, solutions:, **options) + RDF::Query::Solutions(solutions.map do |solution| + list = operand(0).evaluate(solution.bindings, formulae: formulae) + next unless list + list = RDF::N3::List.try_list(list, queryable).evaluate(solution.bindings, formulae: formulae) + object = operand(1).evaluate(solution.bindings, formulae: formulae) || operand(1) + object = formulae[object].deep_dup if object.node? && formulae.has_key?(object) + + log_debug(NAME) {"list: #{list.to_sxp}, object: #{object.to_sxp}"} + unless list.list? && list.valid? + log_error(NAME) {"operand is not a list: #{list.to_sxp}"} + next + end + + if object.variable? 
+ # Bind all list entries to this solution, creates an array of solutions + list.to_a.map do |term| + solution.merge(object.to_sym => term) + end + elsif list.to_a.include?(object) + solution + else + nil + end + end.flatten.compact.uniq) + end end end diff --git a/lib/rdf/n3/algebra/list_operator.rb b/lib/rdf/n3/algebra/list_operator.rb new file mode 100644 index 0000000..5ae1ab8 --- /dev/null +++ b/lib/rdf/n3/algebra/list_operator.rb @@ -0,0 +1,83 @@ +module RDF::N3::Algebra + ## + # This is a generic operator where the subject is a list or binds to a list and the object is either a constant that equals the evaluation of the subject, or a variable to which the result is bound in a solution + class ListOperator < SPARQL::Algebra::Operator::Binary + include SPARQL::Algebra::Query + include SPARQL::Algebra::Update + include RDF::N3::Algebra::Builtin + + NAME = :listOperator + + ## + # The operator takes a list and provides a mechanism for subclasses to operate over (and validate) that list argument. + # + # @param [RDF::Queryable] queryable + # the graph or repository to query + # @param [RDF::Query::Solutions] solutions + # solutions for chained queries + # @return [RDF::Query::Solutions] + def execute(queryable, solutions:, **options) + RDF::Query::Solutions(solutions.map do |solution| + # Might be a variable or node evaluating to a list in queryable, or might be a list with variables + subject = operand(0).evaluate(solution.bindings, formulae: formulae) + next unless subject + # If it evaluated to a BNode, re-expand as a list + subject = RDF::N3::List.try_list(subject, queryable).evaluate(solution.bindings, formulae: formulae) + object = operand(1).evaluate(solution.bindings, formulae: formulae) || operand(1) + object = formulae.fetch(object, object) if object.node? + + log_info(self.class.const_get(:NAME), "subject") {SXP::Generator.string(subject.to_sxp_bin).strip} + log_info(self.class.const_get(:NAME), "object") {SXP::Generator.string(object.to_sxp_bin).strip} + next unless validate(subject) + + lhs = resolve(subject) + if lhs.nil? + log_error(self.class.const_get(:NAME), "subject evaluates to null") {subject.inspect} + next + end + + if object.variable? + log_debug(self.class.const_get(:NAME), "result") {SXP::Generator.string(lhs.to_sxp_bin).strip} + solution.merge(object.to_sym => lhs) + elsif object != lhs + log_debug(self.class.const_get(:NAME), "result: false") + nil + else + log_debug(self.class.const_get(:NAME), "result: true") + solution + end + end.compact.uniq) + end + + ## + # Input is generically the subject + # + # @return [RDF::Term] + def input_operand + operand(0) + end + + ## + # Subclasses implement `resolve`. + # + # @param [RDF::N3::List] list + # @return [RDF::Term] + def resolve(list) + raise NotImplemented + end + + ## + # Subclasses may override or supplement validate to perform validation on the list subject + # + # @param [RDF::N3::List] list + # @return [Boolean] + def validate(list) + if list.list? && list.valid? + true + else + log_error(NAME) {"operand is not a list: #{list.to_sxp}"} + false + end + end + end +end diff --git a/lib/rdf/n3/algebra/log/conclusion.rb b/lib/rdf/n3/algebra/log/conclusion.rb index 569fe4e..e5e790a 100644 --- a/lib/rdf/n3/algebra/log/conclusion.rb +++ b/lib/rdf/n3/algebra/log/conclusion.rb @@ -3,7 +3,63 @@ module RDF::N3::Algebra::Log # All possible conclusions which can be drawn from a formula. 
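# ListOperator above factors out the common pattern: evaluate the subject to an
# RDF::N3::List, validate it, compute a value with #resolve, then either compare
# that value to a constant object or bind it to a variable object. A hypothetical
# subclass (listReverse is not a defined N3 built-in and no vocabulary URI is
# registered; this only illustrates the extension point) would supply #resolve:
require 'rdf/n3'

module RDF::N3::Algebra::List
  class Reverse < RDF::N3::Algebra::ListOperator
    NAME = :listReverse

    def resolve(list)
      RDF::N3::List.new(values: list.to_a.reverse)
    end
  end
end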
# # The object of this function, a formula, is the set of conclusions which can be drawn from the subject formula, by successively applying any rules it contains to the data it contains. This is equivalent to cwm's "--think" command line function. It does use built-ins, so it may for example indirectly invoke other documents, validate signatures, etc. - class Conclusion < SPARQL::Algebra::Operator::Binary + class Conclusion < RDF::N3::Algebra::ResourceOperator NAME = :logConclusion + URI = RDF::N3::Log.conclusion + + ## + # Evaluates this operator by creating a new formula containing the triples generated by reasoning over the input formula using think. + # + # The subject is evaluated into an isolated repository so that conclusions evaluated when evaluating the subject are not necessarily conclusions resulting from evaluating this operator. + # + # @param [RDF::N3::Algebra:Formula] resource + # @return [RDF::N3::Algebra::Formula] + # @see RDF::N3::ListOperator#evaluate + def resolve(resource, position:) + return resource unless position == :subject + + log_depth do + reasoner = RDF::N3::Reasoner.new(resource, **@options) + conclusions = RDF::N3::Repository.new + reasoner.execute(think: true) {|stmt| conclusions << stmt} + + # The result is a formula containing the conclusions + form = RDF::N3::Algebra::Formula.from_enumerable(conclusions, **@options).deep_dup + + log_info("#{NAME} resolved") {SXP::Generator.string form.to_sxp_bin} + form + end + end + + ## + # To be valid, subject must be a formula, and object a formula or variable. + # + # @param [RDF::Term] subject + # @param [RDF::Term] object + # @return [Boolean] + def valid?(subject, object) + subject.formula? && (object.formula? || object.is_a?(RDF::Query::Variable)) + end + + ## + # Return subject operand. + # + # @return [RDF::Term] + def input_operand + operands.first + end + + ## + # Yields statements, and de-asserts `inferred` from the subject. + # + # @yield [statement] + # each matching statement + # @yieldparam [RDF::Statement] solution + # @yieldreturn [void] ignored + def each(solutions:, &block) + super do |stmt| + block.call(RDF::Statement.from(stmt.to_quad)) + end + end end end diff --git a/lib/rdf/n3/algebra/log/conjunction.rb b/lib/rdf/n3/algebra/log/conjunction.rb index 55bc59c..ab3fcfe 100644 --- a/lib/rdf/n3/algebra/log/conjunction.rb +++ b/lib/rdf/n3/algebra/log/conjunction.rb @@ -3,7 +3,34 @@ module RDF::N3::Algebra::Log # A function to merge formulae: logical AND. # # The subject is a list of formulae. The object, which can be generated, is a formula containing a copy of each of the formulae in the list on the left. A cwm built-in function. - class Conjunction < SPARQL::Algebra::Operator::Binary + class Conjunction < RDF::N3::Algebra::ListOperator NAME = :logConjunction + URI = RDF::N3::Log.conjunction + + ## + # Evaluates this operator by creating a new formula containing the triples from each of the formulae in the list. + # + # @param [RDF::N3::List] list + # @return [RDF::N3::Algebra::Formula] + # @see RDF::N3::ListOperator#evaluate + def resolve(list) + form = RDF::N3::Algebra::Formula.new(graph_name: RDF::Node.intern(list.hash)) + log_debug(NAME, "list hash") {form.graph_name} + + list.each do |f| + form.operands.push(*f.operands) + end + form = form.dup + log_info(NAME, "result") {SXP::Generator.string form.to_sxp_bin} + form + end + + ## + # Return subject operand. 
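# The log:conclusion resolution above can be exercised directly: run the reasoner in
# "think" mode over some input and collect what it infers. A minimal sketch, assuming
# the reasoner accepts any enumerable of statements (it is handed a formula in
# Conclusion#resolve above); the rule here also shows log:implies via the => shorthand:
require 'rdf/n3'

input = RDF::N3::Reader.new(%(
  @prefix : <http://example.org/> .
  :socrates a :Human .
  { ?x a :Human } => { ?x a :Mortal } .
))
reasoner    = RDF::N3::Reasoner.new(input)
conclusions = RDF::N3::Repository.new
reasoner.execute(think: true) {|statement| conclusions << statement}
conclusions.count   # number of derivable statements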
+ # + # @return [RDF::Term] + def input_operand + operands.first + end end end diff --git a/lib/rdf/n3/algebra/log/content.rb b/lib/rdf/n3/algebra/log/content.rb new file mode 100644 index 0000000..538393c --- /dev/null +++ b/lib/rdf/n3/algebra/log/content.rb @@ -0,0 +1,34 @@ +module RDF::N3::Algebra::Log + ## + # This connects a document and a string that represents it. + # + # (Cwm knows how to go get a document in order to evaluate this.) + # + # Note that the content-type of the information is not given and so must be known or guessed. + class Content < RDF::N3::Algebra::ResourceOperator + NAME = :logContent + URI = RDF::N3::Log.content + + ## + # Reads the subject into the object. + # + # Returns nil if resource does not validate, given its position + # + # @param [RDF::N3::List] resource + # @return [RDF::Term] + def resolve(resource, position: :subject) + case position + when :subject + return nil unless resource.literal? || resource.uri? + content = begin + as_literal(RDF::Util::File.open_file(resource) {|f| f.read}) + rescue IOError + nil + end + when :object + return nil unless resource.literal? || resource.variable? + resource + end + end + end +end diff --git a/lib/rdf/n3/algebra/log/equalTo.rb b/lib/rdf/n3/algebra/log/equalTo.rb deleted file mode 100644 index e93a7e5..0000000 --- a/lib/rdf/n3/algebra/log/equalTo.rb +++ /dev/null @@ -1,7 +0,0 @@ -module RDF::N3::Algebra::Log - ## - # True if the subject and object are the same RDF node (symbol or literal). Do not confuse with owl:sameAs. A cwm built-in logical operator, RDF graph level. - class EqualTo < SPARQL::Algebra::Operator::Binary - NAME = :logEqualTo - end -end diff --git a/lib/rdf/n3/algebra/log/equal_to.rb b/lib/rdf/n3/algebra/log/equal_to.rb new file mode 100644 index 0000000..ba623e5 --- /dev/null +++ b/lib/rdf/n3/algebra/log/equal_to.rb @@ -0,0 +1,34 @@ +module RDF::N3::Algebra::Log + ## + # True if the subject and object are the same RDF node (symbol or literal). Do not confuse with owl:sameAs. A cwm built-in logical operator, RDF graph level. + class EqualTo < RDF::N3::Algebra::ResourceOperator + NAME = :logEqualTo + URI = RDF::N3::Log.equalTo + + ## + # Resolves inputs as terms. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Literal] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + resource if resource.term? + end + + # Both subject and object are inputs. + def input_operand + RDF::N3::List.new(values: operands) + end + + ## + # @param [RDF::Literal] left + # a literal + # @param [RDF::Literal] right + # a literal + # @return [RDF::Literal::Boolean] + def apply(left, right) + RDF::Literal(left.sameTerm?(right)) + end + end +end diff --git a/lib/rdf/n3/algebra/log/implies.rb b/lib/rdf/n3/algebra/log/implies.rb index 4935f5c..0cd23e7 100644 --- a/lib/rdf/n3/algebra/log/implies.rb +++ b/lib/rdf/n3/algebra/log/implies.rb @@ -8,12 +8,15 @@ module RDF::N3::Algebra::Log class Implies < SPARQL::Algebra::Operator::Binary include SPARQL::Algebra::Query include SPARQL::Algebra::Update - include RDF::Enumerable - include RDF::Util::Logger + include RDF::N3::Algebra::Builtin NAME = :logImplies + URI = RDF::N3::Log.implies - # Yields solutions from subject. Solutions are created by evaluating subject against `queryable`. + ## + # Returns solutions from subject. Solutions are created by evaluating subject against `queryable`. + # + # Solutions are kept within this instance, and used for conclusions. 
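# log:content above relies on RDF::Util::File to dereference the subject; stripped of
# validation, the underlying call is just the following (the URL is an example only):
require 'rdf'

body = RDF::Util::File.open_file("https://example.org/doc.n3") {|f| f.read}
RDF::Literal(body)   # the string bound as the object of log:content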
Note that the evaluated solutions do not affect that of the invoking formula, as the solution spaces are disjoint. # # @param [RDF::Queryable] queryable # the graph or repository to query @@ -24,14 +27,44 @@ class Implies < SPARQL::Algebra::Operator::Binary # @return [RDF::Solutions] distinct solutions def execute(queryable, solutions:, **options) @queryable = queryable - log_debug {"logImplies"} - @solutions = log_depth {operands.first.execute(queryable, solutions: solutions, **options)} - log_debug {"(logImplies solutions) #{@solutions.to_sxp}"} + @solutions = RDF::Query::Solutions(solutions.map do |solution| + log_debug(NAME, "solution") {SXP::Generator.string(solution.to_sxp_bin)} + subject = operand(0).evaluate(solution.bindings, formulae: formulae) + object = operand(1).evaluate(solution.bindings, formulae: formulae) + log_info(NAME, "subject") {SXP::Generator.string(subject.to_sxp_bin)} + log_info(NAME, "object") {SXP::Generator.string(object.to_sxp_bin)} + + # Nothing to do if variables aren't resolved. + next unless subject && object + + solns = log_depth {subject.execute(queryable, solutions: RDF::Query::Solutions(solution), **options)} + + # Execute object as well (typically used for log:outputString) + solns.each do |soln| + log_depth {object.execute(queryable, solutions: RDF::Query::Solutions(soln), **options)} + end + + # filter solutions where not all variables in antecedant are bound. + vars = subject.universal_vars + solns = RDF::Query::Solutions(solns.to_a.select do |soln| + vars.all? {|v| soln.bound?(v)} + end) + solns + end.flatten.compact.uniq) + log_info(NAME) {SXP::Generator.string(@solutions.to_sxp_bin)} # Return original solutions, without bindings solutions end + ## + # Clear out any cached solutions. + # This principaly is for log:conclusions + def clear_solutions + super + @solutions = nil + end + ## # Yields statements from the object based on solutions determined from the subject. Each solution formed by querying `queryable` from the subject is used to create a graph, which must be a subgraph of `queryable`. If so, that solution is used to generate triples from the object formula which are yielded. # @@ -39,34 +72,26 @@ def execute(queryable, solutions:, **options) # each matching statement # @yieldparam [RDF::Statement] solution # @yieldreturn [void] ignored - def each(&block) - @solutions ||= RDF::Query::Solutions.new - log_debug {"logImplies each #{@solutions.to_sxp}"} - subject, object = operands - - if @solutions.empty? - # Some evalaluatable operand evaluated to false - log_debug("(logImplies implication false - no solutions)") - return - end - - # Graph based on solutions from subject - subject_graph = log_depth {RDF::Graph.new {|g| g << subject}} + def each(solutions: RDF::Query::Solutions(), &block) + # Merge solutions in with those for the evaluation of this implication + # Clear out solutions so they don't get remembered erroneously. + solutions, @solutions = Array(@solutions), nil + log_depth do + super(solutions: RDF::Query::Solutions(RDF::Query::Solution.new), &block) - # Use solutions from subject for object - object.solutions = @solutions + solutions.each do |solution| + log_info("(logImplies each) solution") {SXP::Generator.string solution.to_sxp_bin} + object = operand(1).evaluate(solution.bindings, formulae: formulae) + log_info("(logImplies each) object") {SXP::Generator.string object.to_sxp_bin} - # Nothing emitted if @solutions is not complete. Solutions are complete when all variables are bound. 
- if @queryable.contain?(subject_graph) - log_debug("(logImplies implication true)") - # Yield statements into the default graph - log_depth do - object.each do |statement| - block.call(RDF::Statement.from(statement.to_triple, inferred: true, graph_name: graph_name)) + # Yield inferred statements + log_depth do + object.each(solutions: RDF::Query::Solutions(solution)) do |statement| + log_debug(("(logImplies each) infer\s")) {statement.to_sxp} + block.call(RDF::Statement.from(statement.to_quad, inferred: true)) + end end end - else - log_debug("(logImplies implication false)") end end diff --git a/lib/rdf/n3/algebra/log/includes.rb b/lib/rdf/n3/algebra/log/includes.rb index ee3aa3d..949ef78 100644 --- a/lib/rdf/n3/algebra/log/includes.rb +++ b/lib/rdf/n3/algebra/log/includes.rb @@ -7,7 +7,64 @@ module RDF::N3::Algebra::Log # Variable substitution is applied recursively to nested compound terms such as formulae, lists and sets. # # (Understood natively by cwm when in in the antecedent of a rule. You can use this to peer inside nested formulae.) - class Includes < SPARQL::Algebra::Operator::Binary + class Includes < RDF::N3::Algebra::ResourceOperator NAME = :logIncludes + URI = RDF::N3::Log.includes + + ## + # Both subject and object must be formulae. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + resource if resource.formula? + end + + # Both subject and object are inputs. + def input_operand + RDF::N3::List.new(values: operands) + end + + ## + # Creates a repository constructed by substituting variables and in that subject with known IRIs and queries object against that repository. Either retuns a single solution, or no solutions. + # + # @note this does allow object to have variables not in the subject, if they could have been substituted away. + # + # @param [RDF::N3::Algebra::Formula] subject + # a formula + # @param [RDF::N3::Algebra::Formula] object + # a formula + # @return [RDF::Literal::Boolean] + def apply(subject, object) + subject_var_map = subject.variables.values.inject({}) {|memo, v| memo.merge(v => RDF::URI(v.name))} + object_vars = object.variables.keys + log_debug(NAME, "subject var map") {SXP::Generator.string(subject_var_map.to_sxp_bin)} + log_debug(NAME, "object vars") {SXP::Generator.string(object_vars.to_sxp_bin)} + # create a queryable from subject, replacing variables with IRIs for thsoe variables. + queryable = RDF::Repository.new do |r| + log_depth do + subject.each do |stmt| + parts = stmt.to_quad.map do |part| + part.is_a?(RDF::Query::Variable) ? subject_var_map.fetch(part) : part + end + r << RDF::Statement.from(parts) + end + end + end + + # Query object against subject + solns = log_depth {queryable.query(object, **@options)} + log_info("(#{NAME} solutions)") {SXP::Generator.string solns.to_sxp_bin} + + if !solns.empty? && (object_vars - solns.variable_names).empty? + # Return solution + solns.first + else + # Return false, + RDF::Literal::FALSE + end + end end end diff --git a/lib/rdf/n3/algebra/log/n3_string.rb b/lib/rdf/n3/algebra/log/n3_string.rb new file mode 100644 index 0000000..c2e8a00 --- /dev/null +++ b/lib/rdf/n3/algebra/log/n3_string.rb @@ -0,0 +1,34 @@ +module RDF::N3::Algebra::Log + ## + # The subject formula, expressed as N3, gives this string. 
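# The log:includes test above boils down to "does the object's pattern match inside
# the subject's graph once variables are substituted?". A reduced sketch with plain
# RDF.rb objects (example IRIs and values only):
require 'rdf'

ex   = RDF::URI("http://example.org/")
repo = RDF::Repository.new do |r|
  r << RDF::Statement.new(ex + "s", ex + "p", RDF::Literal(1))
end
query = RDF::Query.new do
  pattern [:who, ex + "p", RDF::Literal(1)]
end
query.execute(repo).empty?   #=> false, so the inclusion holds for this pattern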
+ class N3String < RDF::N3::Algebra::ResourceOperator + NAME = :logN3String + URI = RDF::N3::Log.n3String + + ## + # Serializes the subject formula into an N3 string representation. + # + # @param [RDF::N3::List] resource + # @return [RDF::Term] + def resolve(resource, position: :subject) + case position + when :subject + return nil unless resource.formula? + as_literal(RDF::N3::Writer.buffer {|w| resource.each {|st| w << st}}) + when :object + return nil unless resource.literal? || resource.variable? + resource + end + end + + ## + # Subject must evaluate to a formula and object to a literal. + # + # @param [RDF::Term] subject + # @param [RDF::Term] object + # @return [Boolean] + def valid?(subject, object) + subject.formula? && (object.variable? || object.literal?) + end + end +end diff --git a/lib/rdf/n3/algebra/log/notEqualTo.rb b/lib/rdf/n3/algebra/log/notEqualTo.rb deleted file mode 100644 index 8346b6c..0000000 --- a/lib/rdf/n3/algebra/log/notEqualTo.rb +++ /dev/null @@ -1,7 +0,0 @@ -module RDF::N3::Algebra::Log - ## - # Equality in this sense is actually the same URI. A cwm built-in logical operator. - class NotEqualTo < SPARQL::Algebra::Operator::Binary - NAME = :logNotEqualTo - end -end diff --git a/lib/rdf/n3/algebra/log/notIncludes.rb b/lib/rdf/n3/algebra/log/notIncludes.rb deleted file mode 100644 index d73513d..0000000 --- a/lib/rdf/n3/algebra/log/notIncludes.rb +++ /dev/null @@ -1,12 +0,0 @@ -module RDF::N3::Algebra::Log - ## - # The object formula is NOT a subset of subject. True iff log:includes is false. The converse of log:includes. - # (Understood natively by cwm. The subject formula may contain variables.) - # - # (In cwm, variables must of course end up getting bound before the log:include test can be done, or an infinite result set would result) - # - # Related: See includes - class NotIncludes < SPARQL::Algebra::Operator::Binary - NAME = :logNotIncludes - end -end diff --git a/lib/rdf/n3/algebra/log/not_equal_to.rb b/lib/rdf/n3/algebra/log/not_equal_to.rb new file mode 100644 index 0000000..ddf54e0 --- /dev/null +++ b/lib/rdf/n3/algebra/log/not_equal_to.rb @@ -0,0 +1,23 @@ +module RDF::N3::Algebra::Log + ## + # Equality in this sense is actually the same URI. A cwm built-in logical operator. + class NotEqualTo < SPARQL::Algebra::Operator::SameTerm + include RDF::N3::Algebra::Builtin + NAME = :logNotEqualTo + URI = RDF::N3::Log.notEqualto + + ## + # Returns `true` if the operands are not the same RDF term; returns + # `false` otherwise. + # + # @param [RDF::Term] term1 + # an RDF term + # @param [RDF::Term] term2 + # an RDF term + # @return [RDF::Literal::Boolean] `true` or `false` + # @raise [TypeError] if either operand is unbound + def apply(term1, term2) + RDF::Literal(!term1.eql?(term2)) + end + end +end diff --git a/lib/rdf/n3/algebra/log/not_includes.rb b/lib/rdf/n3/algebra/log/not_includes.rb new file mode 100644 index 0000000..3789bb3 --- /dev/null +++ b/lib/rdf/n3/algebra/log/not_includes.rb @@ -0,0 +1,27 @@ +module RDF::N3::Algebra::Log + ## + # The object formula is NOT a subset of subject. True iff log:includes is false. The converse of log:includes. + # (Understood natively by cwm. The subject formula may contain variables.) 
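# log:n3String above serializes a formula through the N3 writer; the same call against
# an ordinary graph looks like this (the prefix and statement are examples only):
require 'rdf/n3'

graph = RDF::Graph.new
graph << RDF::Statement.new(RDF::URI("http://example.org/s"),
                            RDF.type,
                            RDF::URI("http://example.org/Klass"))
RDF::N3::Writer.buffer(prefixes: {ex: "http://example.org/"}) do |writer|
  graph.each_statement {|st| writer << st}
end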
+ # + # (In cwm, variables must of course end up getting bound before the log:include test can be done, or an infinite result set would result) + # + # Related: See includes + class NotIncludes < Includes + NAME = :logNotIncludes + URI = RDF::N3::Log.notIncludes + + ## + # Uses log:includes and returns a solution if log:includes fails + # + # @param [RDF::Queryable] queryable + # the graph or repository to query + # @param [Hash{Symbol => Object}] options + # any additional keyword options + # @option options [RDF::Query::Solutions] solutions + # optional initial solutions for chained queries + # @return [RDF::Solutions] distinct solutions + def execute(queryable, solutions:, **options) + super.empty? ? RDF::Query::Solutions(RDF::Query::Solution.new) : RDF::Query::Solutions.new + end + end +end diff --git a/lib/rdf/n3/algebra/log/outputString.rb b/lib/rdf/n3/algebra/log/outputString.rb deleted file mode 100644 index 0b525e5..0000000 --- a/lib/rdf/n3/algebra/log/outputString.rb +++ /dev/null @@ -1,7 +0,0 @@ -module RDF::N3::Algebra::Log - ## - # The subject is a key and the object is a string, where the strings are to be output in the order of the keys. - class OutputString < SPARQL::Algebra::Operator::Binary - NAME = :logOutputString - end -end diff --git a/lib/rdf/n3/algebra/log/output_string.rb b/lib/rdf/n3/algebra/log/output_string.rb new file mode 100644 index 0000000..5bc533b --- /dev/null +++ b/lib/rdf/n3/algebra/log/output_string.rb @@ -0,0 +1,40 @@ +module RDF::N3::Algebra::Log + ## + # The subject is a key and the object is a string, where the strings are to be output in the order of the keys. + class OutputString < RDF::N3::Algebra::ResourceOperator + NAME = :logOutputString + URI = RDF::N3::Log.outputString + + ## + # Resolves inputs as strings. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + SPARQL::Algebra::Expression.cast(RDF::XSD.string, resource) if resource.term? + end + + ## + # Returns `term2`, but adds `term2` as an output keyed on `term1`. + # + # @param [RDF::Term] term1 + # an RDF term + # @param [RDF::Term] term2 + # an RDF term + # @return [RDF::Literal::Boolean] `true` or `false` + # @raise [TypeError] if either operand is not an RDF term or operands are not comperable + # + # @see RDF::Term#== + def apply(term1, term2) + (@options[:strings][term1.to_s] ||= []) << term2.to_s + term2 + end + + # Both subject and object are inputs. + def input_operand + RDF::N3::List.new(values: operands) + end + end +end diff --git a/lib/rdf/n3/algebra/log/parsed_as_n3.rb b/lib/rdf/n3/algebra/log/parsed_as_n3.rb new file mode 100644 index 0000000..3ab6ddc --- /dev/null +++ b/lib/rdf/n3/algebra/log/parsed_as_n3.rb @@ -0,0 +1,36 @@ +module RDF::N3::Algebra::Log + ## + # The subject string, parsed as N3, gives this formula. + class ParsedAsN3 < RDF::N3::Algebra::ResourceOperator + NAME = :logParsedAsN3 + URI = RDF::N3::Log.parsedAsN3 + + ## + # Parses the subject into a new formula. + # + # Returns nil if resource does not validate, given its position + # + # @param [RDF::N3::List] resource + # @return [RDF::Term] + def resolve(resource, position: :subject) + case position + when :subject + return nil unless resource.literal? 
+ begin + repo = RDF::N3::Repository.new + repo << RDF::N3::Reader.new(resource.to_s, **@options.merge(list_terms: true, logger: false)) + log_debug("logParsedAsN3") {SXP::Generator.string repo.statements.to_sxp_bin} + content_hash = resource.hash # used as name of resulting formula + form = RDF::N3::Algebra::Formula.from_enumerable(repo, graph_name: RDF::Node.intern(content_hash)) + log_info(NAME) {"form hash (#{resource}): #{form.hash}"} + form + rescue RDF::ReaderError + nil + end + when :object + return nil unless resource.literal? || resource.is_a?(RDF::Query::Variable) + resource + end + end + end +end diff --git a/lib/rdf/n3/algebra/log/semantics.rb b/lib/rdf/n3/algebra/log/semantics.rb new file mode 100644 index 0000000..31b90de --- /dev/null +++ b/lib/rdf/n3/algebra/log/semantics.rb @@ -0,0 +1,40 @@ +module RDF::N3::Algebra::Log + ## + # The log:semantics of a document is the formula. achieved by parsing representation of the document. For a document in Notation3, log:semantics is the log:parsedAsN3 of the log:contents of the document. For a document in RDF/XML, it is parsed according to the RDF/XML specification to yield an RDF formula (a subclass of N3 log:Formula). + # + # [Aside: Philosophers will be distracted here into worrying about the meaning of meaning. At least we didn't call this function "meaning"! In as much as N3 is used as an interlingua for interoperability for different systems, this for an N3 based system is the meaning expressed by a document.] + # + # (Cwm knows how to go get a document and parse N3 and RDF/XML it in order to evaluate this. Other languages for web documents may be defined whose N3 semantics are therefore also calculable, and so they could be added in due course. See for example GRDDL, RDFa, etc) + class Semantics < RDF::N3::Algebra::ResourceOperator + NAME = :logSemantics + URI = RDF::N3::Log.semantics + + ## + # Parses the subject into a new formula. + # + # Returns nil if resource does not validate, given its position + # + # @param [RDF::N3::List] resource + # @return [RDF::Term] + def resolve(resource, position: :subject) + case position + when :subject + return nil unless resource.literal? || resource.uri? + begin + repo = RDF::N3::Repository.new + repo << RDF::Reader.open(resource, **@options.merge(list_terms: true, base_uri: resource, logger: false)) + content_hash = repo.statements.hash # used as name of resulting formula + form = RDF::N3::Algebra::Formula.from_enumerable(repo, graph_name: RDF::Node.intern(content_hash)) + log_debug(NAME) {"form hash (#{resource}): #{form.hash}"} + form + rescue IOError, RDF::ReaderError => e + log_error(NAME) {"error loading #{resource}: #{e}"} + nil + end + when :object + return nil unless resource.literal? || resource.variable? + resource + end + end + end +end diff --git a/lib/rdf/n3/algebra/math/absoluteValue.rb b/lib/rdf/n3/algebra/math/absoluteValue.rb deleted file mode 100644 index f7287fe..0000000 --- a/lib/rdf/n3/algebra/math/absoluteValue.rb +++ /dev/null @@ -1,9 +0,0 @@ -module RDF::N3::Algebra::Math - ## - # The object is calulated as the absolute value of the subject. 
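# log:parsedAsN3 above parses a string literal as N3 into a repository before wrapping
# it as a formula; the core of that resolution, with an inline example document:
require 'rdf/n3'

n3   = "@prefix : <http://example.org/> . :a :b :c ."
repo = RDF::N3::Repository.new
repo << RDF::N3::Reader.new(n3, list_terms: true)
repo.count   #=> 1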
- class AbsoluteValue < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - - NAME = :mathAbsoluteValue - end -end diff --git a/lib/rdf/n3/algebra/math/absolute_value.rb b/lib/rdf/n3/algebra/math/absolute_value.rb new file mode 100644 index 0000000..74008c9 --- /dev/null +++ b/lib/rdf/n3/algebra/math/absolute_value.rb @@ -0,0 +1,36 @@ +module RDF::N3::Algebra::Math + ## + # The object is calulated as the absolute value of the subject. + # + # @see https://www.w3.org/TR/xpath-functions/#func-abs + class AbsoluteValue < RDF::N3::Algebra::ResourceOperator + NAME = :mathAbsoluteValue + URI = RDF::N3::Math.absoluteValue + + ## + # The math:absoluteValue operator takes string or number and calculates its absolute value. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case position + when :subject + return nil unless resource.literal? + as_literal(resource.as_number.abs) + when :object + return nil unless resource.literal? || resource.variable? + resource + end + end + + ## + # Input is either the subject or object + # + # @return [RDF::Term] + def input_operand + RDF::N3::List.new(values: operands) + end + end +end diff --git a/lib/rdf/n3/algebra/math/acos.rb b/lib/rdf/n3/algebra/math/acos.rb new file mode 100644 index 0000000..5e950f0 --- /dev/null +++ b/lib/rdf/n3/algebra/math/acos.rb @@ -0,0 +1,26 @@ +module RDF::N3::Algebra::Math + ## + # The object is calulated as the arc cosine value of the subject. + class ACos < RDF::N3::Algebra::ResourceOperator + NAME = :mathACos + URI = RDF::N3::Math.acos + + ## + # The math:acos operator takes string or number and calculates its arc cosine. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case position + when :subject + return nil unless resource.literal? + as_literal(Math.acos(resource.as_number.object)) + when :object + return nil unless resource.literal? || resource.variable? + resource + end + end + end +end diff --git a/lib/rdf/n3/algebra/math/acosh.rb b/lib/rdf/n3/algebra/math/acosh.rb new file mode 100644 index 0000000..c0edc1e --- /dev/null +++ b/lib/rdf/n3/algebra/math/acosh.rb @@ -0,0 +1,26 @@ +module RDF::N3::Algebra::Math + ## + # The object is calulated as the inverse hyperbolic cosine value of the subject. + class ACosH < RDF::N3::Algebra::ResourceOperator + NAME = :mathACosH + URI = RDF::N3::Math.acosh + + ## + # The math:acosh operator takes string or number and calculates its inverse hyperbolic cosine. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case position + when :subject + return nil unless resource.literal? + as_literal(Math.acosh(resource.as_number.object)) + when :object + return nil unless resource.literal? || resource.variable? + resource + end + end + end +end diff --git a/lib/rdf/n3/algebra/math/asin.rb b/lib/rdf/n3/algebra/math/asin.rb new file mode 100644 index 0000000..4b262ea --- /dev/null +++ b/lib/rdf/n3/algebra/math/asin.rb @@ -0,0 +1,26 @@ +module RDF::N3::Algebra::Math + ## + # The object is calulated as the arc sine value of the subject. 
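# The trigonometric built-ins above all follow the same shape: take the numeric value
# of the subject literal and delegate to Ruby's Math module, for example:
Math.acos(1.0)    #=> 0.0
Math.acosh(1.0)   #=> 0.0
Math.asin(0.0)    #=> 0.0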
+ class ASin < RDF::N3::Algebra::ResourceOperator + NAME = :mathASin + URI = RDF::N3::Math.asin + + ## + # The math:asin operator takes string or number and calculates its arc sine. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case position + when :subject + return nil unless resource.literal? + as_literal(Math.asin(resource.as_number.object)) + when :object + return nil unless resource.literal? || resource.variable? + resource + end + end + end +end diff --git a/lib/rdf/n3/algebra/math/asinh.rb b/lib/rdf/n3/algebra/math/asinh.rb new file mode 100644 index 0000000..1c5374f --- /dev/null +++ b/lib/rdf/n3/algebra/math/asinh.rb @@ -0,0 +1,26 @@ +module RDF::N3::Algebra::Math + ## + # The object is calulated as the inverse hyperbolic sine value of the subject. + class ASinH < RDF::N3::Algebra::ResourceOperator + NAME = :mathASinH + URI = RDF::N3::Math.asinh + + ## + # The math:asinh operator takes string or number and calculates its inverse hyperbolic sine. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case position + when :subject + return nil unless resource.literal? + as_literal(Math.asinh(resource.as_number.object)) + when :object + return nil unless resource.literal? || resource.variable? + resource + end + end + end +end diff --git a/lib/rdf/n3/algebra/math/atan.rb b/lib/rdf/n3/algebra/math/atan.rb new file mode 100644 index 0000000..03f4020 --- /dev/null +++ b/lib/rdf/n3/algebra/math/atan.rb @@ -0,0 +1,26 @@ +module RDF::N3::Algebra::Math + ## + # The object is calulated as the arc tangent value of the subject. + class ATan < RDF::N3::Algebra::ResourceOperator + NAME = :mathATan + URI = RDF::N3::Math.atan + + ## + # The math:atan operator takes string or number and calculates its arc tangent. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case position + when :subject + return nil unless resource.literal? + as_literal(Math.atan(resource.as_number.object)) + when :object + return nil unless resource.literal? || resource.variable? + resource + end + end + end +end diff --git a/lib/rdf/n3/algebra/math/atanh.rb b/lib/rdf/n3/algebra/math/atanh.rb new file mode 100644 index 0000000..226ad1d --- /dev/null +++ b/lib/rdf/n3/algebra/math/atanh.rb @@ -0,0 +1,26 @@ +module RDF::N3::Algebra::Math + ## + # The object is calulated as the inverse hyperbolic tangent value of the subject. + class ATanH < RDF::N3::Algebra::ResourceOperator + NAME = :mathATanH + URI = RDF::N3::Math.atanh + + ## + # The math:atanh operator takes string or number and calculates its inverse hyperbolic tangent. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case position + when :subject + return nil unless resource.literal? + as_literal(Math.atanh(resource.as_number.object)) + when :object + return nil unless resource.literal? || resource.variable? 
+ resource + end + end + end +end diff --git a/lib/rdf/n3/algebra/math/ceiling.rb b/lib/rdf/n3/algebra/math/ceiling.rb new file mode 100644 index 0000000..0e27a2d --- /dev/null +++ b/lib/rdf/n3/algebra/math/ceiling.rb @@ -0,0 +1,28 @@ +module RDF::N3::Algebra::Math + ## + # The object is calculated as the subject upwards to a whole number. + # + # @see https://www.w3.org/TR/xpath-functions/#func-ceiling + class Ceiling < RDF::N3::Algebra::ResourceOperator + NAME = :mathCeiling + URI = RDF::N3::Math.ceiling + + ## + # The math:ceiling operator takes string or number and calculates its ceiling. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case position + when :subject + return nil unless resource.literal? + as_literal(resource.as_number.ceil) + when :object + return nil unless resource.literal? || resource.variable? + resource + end + end + end +end diff --git a/lib/rdf/n3/algebra/math/cos.rb b/lib/rdf/n3/algebra/math/cos.rb new file mode 100644 index 0000000..7ed88b1 --- /dev/null +++ b/lib/rdf/n3/algebra/math/cos.rb @@ -0,0 +1,40 @@ +module RDF::N3::Algebra::Math + ## + # The subject is an angle expressed in radians. The object is calulated as the cosine value of the subject. + # + # @see https://www.w3.org/TR/xpath-functions/#func-math-cos + class Cos < RDF::N3::Algebra::ResourceOperator + NAME = :mathCos + URI = RDF::N3::Math.cos + + ## + # The math:cos operator takes string or number and calculates its cosine. The arc cosine of a concrete object can also calculate a variable subject. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case resource + when RDF::Query::Variable then resource + when RDF::Literal + case position + when :subject + as_literal(Math.cos(resource.as_number.object)) + when :object + as_literal(Math.acos(resource.as_number.object)) + end + else + nil + end + end + + ## + # Input is either the subject or object + # + # @return [RDF::Term] + def input_operand + RDF::N3::List.new(values: operands) + end + end +end diff --git a/lib/rdf/n3/algebra/math/cosh.rb b/lib/rdf/n3/algebra/math/cosh.rb new file mode 100644 index 0000000..a857b4a --- /dev/null +++ b/lib/rdf/n3/algebra/math/cosh.rb @@ -0,0 +1,38 @@ +module RDF::N3::Algebra::Math + ## + # The subject is an angle expressed in radians. The object is calulated as the hyperbolic cosine value of the subject. + class CosH < RDF::N3::Algebra::ResourceOperator + NAME = :mathCosH + URI = RDF::N3::Math.cosh + + ## + # The math:cosh operator takes string or number and calculates its hyperbolic cosine. The inverse hyperbolic cosine of a concrete object can also calculate a variable subject. 
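# math:cos above is bidirectional: a bound subject is mapped through Math.cos, while a
# bound object recovers the subject through Math.acos. In plain Ruby:
angle = Math::PI / 3
Math.cos(angle)              #=> ~0.5
Math.acos(Math.cos(angle))   #=> ~1.0472, the angle back (within floating-point error)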
+ # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case resource + when RDF::Query::Variable then resource + when RDF::Literal + case position + when :subject + as_literal(Math.cosh(resource.as_number.object)) + when :object + as_literal(Math.acosh(resource.as_number.object)) + end + else + nil + end + end + + ## + # Input is either the subject or object + # + # @return [RDF::Term] + def input_operand + RDF::N3::List.new(values: operands) + end + end +end diff --git a/lib/rdf/n3/algebra/math/difference.rb b/lib/rdf/n3/algebra/math/difference.rb index 15f89c9..01dd69a 100644 --- a/lib/rdf/n3/algebra/math/difference.rb +++ b/lib/rdf/n3/algebra/math/difference.rb @@ -1,9 +1,40 @@ module RDF::N3::Algebra::Math ## # The subject is a pair of numbers. The object is calculated by subtracting the second number of the pair from the first. - class Difference < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - + # + # @example + # { ("8" "3") math:difference ?x} => { ?x :valueOf "8 - 3" } . + # { ("8") math:difference ?x } => { ?x :valueOf "8 - (error?)" } . + # { (8 3) math:difference ?x} => { ?x :valueOf "8 - 3" } . + # + # @see https://www.w3.org/TR/xpath-functions/#func-numeric-subtract + class Difference < RDF::N3::Algebra::ListOperator NAME = :mathDifference + URI = RDF::N3::Math.difference + + ## + # The math:difference operator takes a pair of strings or numbers and calculates their difference. + # + # @param [RDF::N3::List] list + # @return [RDF::Term] + # @see RDF::N3::ListOperator#evaluate + def resolve(list) + list.to_a.map(&:as_number).reduce(&:-) + end + + ## + # The list argument must be a pair of literals. + # + # @param [RDF::N3::List] list + # @return [Boolean] + # @see RDF::N3::ListOperator#validate + def validate(list) + if super && list.all?(&:literal?) && list.length == 2 + true + else + log_error(NAME) {"list is not a pair of literals: #{list.to_sxp}"} + false + end + end end end diff --git a/lib/rdf/n3/algebra/math/equalTo.rb b/lib/rdf/n3/algebra/math/equalTo.rb deleted file mode 100644 index 0505047..0000000 --- a/lib/rdf/n3/algebra/math/equalTo.rb +++ /dev/null @@ -1,9 +0,0 @@ -module RDF::N3::Algebra::Math - ## - # True iff the subject is a string representation of a number which is EQUAL TO a number of which the object is a string representation. - class EqualTo < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - - NAME = :mathEqualTo - end -end diff --git a/lib/rdf/n3/algebra/math/equal_to.rb b/lib/rdf/n3/algebra/math/equal_to.rb new file mode 100644 index 0000000..d745d96 --- /dev/null +++ b/lib/rdf/n3/algebra/math/equal_to.rb @@ -0,0 +1,54 @@ +module RDF::N3::Algebra::Math + ## + # **schema**: + # `$a1 math:equalTo $a2` + # + # **summary**: + # checks equality of numbers + # + # **definition**: + # `true` if and only if `$a1` is equal to `$a2`. + # Requires both arguments to be either concrete numerals, or variables bound to a numeral. 
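# math:difference above (and math:exponentiation below) resolve a two-element list by
# reducing its numeric values; the plain-Ruby equivalent of the documented examples:
[8, 3].reduce(:-)    #=> 5    (math:difference)
[8, 3].reduce(:**)   #=> 512  (math:exponentiation)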
+ # + # **literal domains**: + # + # * `$a1`: `xs:decimal` (or its derived types), `xs:float`, or `xs:double` (see note on type promotion, and casting from string) + # * `$a2`: `xs:decimal` (or its derived types), `xs:float`, or `xs:double` (see note on type promotion, and casting from string) + # + # @see https://www.w3.org/TR/xpath-functions/#func-numeric-equal + class EqualTo < RDF::N3::Algebra::ResourceOperator + NAME = :mathEqualTo + URI = RDF::N3::Math.equalTo + + ## + # Resolves inputs as numbers. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + resource.as_number if resource.term? + end + + # Both subject and object are inputs. + def input_operand + RDF::N3::List.new(values: operands) + end + + ## + # Returns TRUE if `term1` and `term2` are the same numeric value. + # + # @param [RDF::Term] term1 + # an RDF term + # @param [RDF::Term] term2 + # an RDF term + # @return [RDF::Literal::Boolean] `true` or `false` + # @raise [TypeError] if either operand is not an RDF term or operands are not comperable + # + # @see RDF::Term#== + def apply(term1, term2) + RDF::Literal(term1 == term2) + end + end +end diff --git a/lib/rdf/n3/algebra/math/exponentiation.rb b/lib/rdf/n3/algebra/math/exponentiation.rb index 9eaf9bd..5b22e17 100644 --- a/lib/rdf/n3/algebra/math/exponentiation.rb +++ b/lib/rdf/n3/algebra/math/exponentiation.rb @@ -1,9 +1,35 @@ module RDF::N3::Algebra::Math ## # The subject is a pair of numbers. The object is calculated by raising the first number of the power of the second. - class Exponentiation < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - + # + # @see https://www.w3.org/TR/xpath-functions/#func-math-exp + class Exponentiation < RDF::N3::Algebra::ListOperator NAME = :mathExponentiation + URI = RDF::N3::Math.exponentiation + + ## + # The math:difference operator takes a pair of strings or numbers and calculates the exponent. + # + # @param [RDF::N3::List] list + # @return [RDF::Term] + # @see RDF::N3::ListOperator#evaluate + def resolve(list) + list.to_a.map(&:as_number).reduce(&:**) + end + + ## + # The list argument must be a pair of literals. + # + # @param [RDF::N3::List] list + # @return [Boolean] + # @see RDF::N3::ListOperator#validate + def validate(list) + if super && list.all?(&:literal?) && list.length == 2 + true + else + log_error(NAME) {"list is not a pair of literals: #{list.to_sxp}"} + false + end + end end end diff --git a/lib/rdf/n3/algebra/math/floor.rb b/lib/rdf/n3/algebra/math/floor.rb new file mode 100644 index 0000000..13a439b --- /dev/null +++ b/lib/rdf/n3/algebra/math/floor.rb @@ -0,0 +1,28 @@ +module RDF::N3::Algebra::Math + ## + # The object is calculated as the subject downwards to a whole number. + # + # @see https://www.w3.org/TR/xpath-functions/#func-floor + class Floor < RDF::N3::Algebra::ResourceOperator + NAME = :mathFloor + URI = RDF::N3::Math.floor + + ## + # The math:floor operator takes string or number and calculates its floor. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case position + when :subject + return nil unless resource.literal? + RDF::Literal(resource.as_number.floor) + when :object + return nil unless resource.literal? || resource.variable? 
+ resource + end + end + end +end diff --git a/lib/rdf/n3/algebra/math/greaterThan.rb b/lib/rdf/n3/algebra/math/greaterThan.rb deleted file mode 100644 index 890675a..0000000 --- a/lib/rdf/n3/algebra/math/greaterThan.rb +++ /dev/null @@ -1,9 +0,0 @@ -module RDF::N3::Algebra::Math - ## - # True iff the subject is a string representation of a number which is greater than the number of which the object is a string representation. - class GreaterThan < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - - NAME = :mathGreaterThan - end -end diff --git a/lib/rdf/n3/algebra/math/greater_than.rb b/lib/rdf/n3/algebra/math/greater_than.rb new file mode 100644 index 0000000..77f9a91 --- /dev/null +++ b/lib/rdf/n3/algebra/math/greater_than.rb @@ -0,0 +1,41 @@ +module RDF::N3::Algebra::Math + ## + # True iff the subject is a string representation of a number which is greater than the number of which the object is a string representation. + # + # @see https://www.w3.org/TR/xpath-functions/#func-numeric-greater-than + class GreaterThan < RDF::N3::Algebra::ResourceOperator + NAME = :mathGreaterThan + URI = RDF::N3::Math.greaterThan + + ## + # Resolves inputs as numbers. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + resource.as_number if resource.term? + end + + # Both subject and object are inputs. + def input_operand + RDF::N3::List.new(values: operands) + end + + ## + # Returns TRUE if `term1` is greater than `term2`. + # + # @param [RDF::Term] term1 + # an RDF term + # @param [RDF::Term] term2 + # an RDF term + # @return [RDF::Literal::Boolean] `true` or `false` + # @raise [TypeError] if either operand is not an RDF term or operands are not comperable + # + # @see RDF::Term#== + def apply(term1, term2) + RDF::Literal(term1 > term2) + end + end +end diff --git a/lib/rdf/n3/algebra/math/integerQuotient.rb b/lib/rdf/n3/algebra/math/integerQuotient.rb deleted file mode 100644 index 5d1287b..0000000 --- a/lib/rdf/n3/algebra/math/integerQuotient.rb +++ /dev/null @@ -1,9 +0,0 @@ -module RDF::N3::Algebra::Math - ## - # The subject is a pair of integer numbers. The object is calculated by dividing the first number of the pair by the second, ignoring remainder. - class IntegerQuotient < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - - NAME = :mathIntegerQuotient - end -end diff --git a/lib/rdf/n3/algebra/math/lessThan.rb b/lib/rdf/n3/algebra/math/lessThan.rb deleted file mode 100644 index c37aac0..0000000 --- a/lib/rdf/n3/algebra/math/lessThan.rb +++ /dev/null @@ -1,9 +0,0 @@ -module RDF::N3::Algebra::Math - ## - # True iff the subject is a string representation of a number which is LESS than a number of which the object is a string representation. - class LessThan < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - - NAME = :mathLessThan - end -end diff --git a/lib/rdf/n3/algebra/math/less_than.rb b/lib/rdf/n3/algebra/math/less_than.rb new file mode 100644 index 0000000..c2ace32 --- /dev/null +++ b/lib/rdf/n3/algebra/math/less_than.rb @@ -0,0 +1,41 @@ +module RDF::N3::Algebra::Math + ## + # True iff the subject is a string representation of a number which is less than the number of which the object is a string representation. 
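# These numeric comparisons (math:greaterThan, math:lessThan) resolve both operands as
# numbers first, so the test is numeric rather than lexical:
"10" < "9"   #=> true  (string comparison)
10 < 9       #=> false (what math:lessThan evaluates)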
+ # + # @see https://www.w3.org/TR/xpath-functions/#func-numeric-less-than + class LessThan < RDF::N3::Algebra::ResourceOperator + NAME = :mathLessThan + URI = RDF::N3::Math.lessThan + + ## + # Resolves inputs as numbers. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + resource.as_number if resource.term? + end + + # Both subject and object are inputs. + def input_operand + RDF::N3::List.new(values: operands) + end + + ## + # Returns TRUE if `term1` is less than `term2`. + # + # @param [RDF::Term] term1 + # an RDF term + # @param [RDF::Term] term2 + # an RDF term + # @return [RDF::Literal::Boolean] `true` or `false` + # @raise [TypeError] if either operand is not an RDF term or operands are not comperable + # + # @see RDF::Term#== + def apply(term1, term2) + RDF::Literal(term1 < term2) + end + end +end diff --git a/lib/rdf/n3/algebra/math/memberCount.rb b/lib/rdf/n3/algebra/math/memberCount.rb deleted file mode 100644 index 8e26fae..0000000 --- a/lib/rdf/n3/algebra/math/memberCount.rb +++ /dev/null @@ -1,9 +0,0 @@ -module RDF::N3::Algebra::Math - ## - # The number of items in a list. The subject is a list, the object is calculated as the number of members. - class MemberCount < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - - NAME = :mathMemberCount - end -end diff --git a/lib/rdf/n3/algebra/math/negation.rb b/lib/rdf/n3/algebra/math/negation.rb index 9538f42..ae1e1a3 100644 --- a/lib/rdf/n3/algebra/math/negation.rb +++ b/lib/rdf/n3/algebra/math/negation.rb @@ -1,9 +1,38 @@ module RDF::N3::Algebra::Math ## # The subject or object is calculated to be the negation of the other. - class Negation < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger + # + # @see https://www.w3.org/TR/xpath-functions/#func-numeric-unary-minus + class Negation < RDF::N3::Algebra::ResourceOperator + include RDF::N3::Algebra::Builtin NAME = :mathNegation + URI = RDF::N3::Math.negation + + ## + # The math:negation operator takes may have either a bound subject or object. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case resource + when RDF::Query::Variable + resource + when RDF::Literal + as_literal(-resource.as_number) + else + nil + end + end + + ## + # Input is either the subject or object + # + # @return [RDF::Term] + def input_operand + RDF::N3::List.new(values: operands) + end end end diff --git a/lib/rdf/n3/algebra/math/notEqualTo.rb b/lib/rdf/n3/algebra/math/notEqualTo.rb deleted file mode 100644 index e97c3f3..0000000 --- a/lib/rdf/n3/algebra/math/notEqualTo.rb +++ /dev/null @@ -1,9 +0,0 @@ -module RDF::N3::Algebra::Math - ## - # True iff the subject is a string representation of a number which is NOT EQUAL to a number of which the object is a string representation. - class NotEqualTo < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - - NAME = :mathNotEqualTo - end -end diff --git a/lib/rdf/n3/algebra/math/notGreaterThan.rb b/lib/rdf/n3/algebra/math/notGreaterThan.rb deleted file mode 100644 index fa46ec1..0000000 --- a/lib/rdf/n3/algebra/math/notGreaterThan.rb +++ /dev/null @@ -1,9 +0,0 @@ -module RDF::N3::Algebra::Math - ## - # True iff the subject is a string representation of a number which is NOT greater than the number of which the object is a string representation. 
- class NotGreaterThan < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - - NAME = :mathNotGreaterThan - end -end diff --git a/lib/rdf/n3/algebra/math/notLessThan.rb b/lib/rdf/n3/algebra/math/notLessThan.rb deleted file mode 100644 index 8bd3774..0000000 --- a/lib/rdf/n3/algebra/math/notLessThan.rb +++ /dev/null @@ -1,9 +0,0 @@ -module RDF::N3::Algebra::Math - ## - # True iff the subject is a string representation of a number which is NOT LESS than a number of which the object is a string representation. - class NotLessThan < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - - NAME = :mathNotLessThan - end -end diff --git a/lib/rdf/n3/algebra/math/not_equal_to.rb b/lib/rdf/n3/algebra/math/not_equal_to.rb new file mode 100644 index 0000000..058e2c5 --- /dev/null +++ b/lib/rdf/n3/algebra/math/not_equal_to.rb @@ -0,0 +1,25 @@ +module RDF::N3::Algebra::Math + ## + # True iff the subject is a string representation of a number which is NOT EQUAL to a number of which the object is a string representation. + # + # @see https://www.w3.org/TR/xpath-functions/#func-numeric-equal + class NotEqualTo < EqualTo + NAME = :mathNotEqualTo + URI = RDF::N3::Math.notEqualTo + + ## + # The math:notEqualTo operator takes a pair of strings or numbers and determines if they are not the same numeric value. + # + # @param [RDF::Term] term1 + # an RDF term + # @param [RDF::Term] term2 + # an RDF term + # @return [RDF::Literal::Boolean] `true` or `false` + # @raise [TypeError] if either operand is not an RDF term or operands are not comperable + # + # @see RDF::Term#== + def apply(term1, term2) + RDF::Literal(super != RDF::Literal::TRUE) + end + end +end diff --git a/lib/rdf/n3/algebra/math/not_greater_than.rb b/lib/rdf/n3/algebra/math/not_greater_than.rb new file mode 100644 index 0000000..e939c96 --- /dev/null +++ b/lib/rdf/n3/algebra/math/not_greater_than.rb @@ -0,0 +1,25 @@ +module RDF::N3::Algebra::Math + ## + # True iff the subject is a string representation of a number which is NOT greater than the number of which the object is a string representation. + # + # @see https://www.w3.org/TR/xpath-functions/#func-numeric-greater-than + class NotGreaterThan < GreaterThan + NAME = :mathNotGreaterThan + URI = RDF::N3::Math.notGreaterThan + + ## + # Returns TRUE if `term1` is less than or equal to `term2`. + # + # @param [RDF::Term] term1 + # an RDF term + # @param [RDF::Term] term2 + # an RDF term + # @return [RDF::Literal::Boolean] `true` or `false` + # @raise [TypeError] if either operand is not an RDF term or operands are not comperable + # + # @see RDF::Term#== + def apply(term1, term2) + RDF::Literal(super != RDF::Literal::TRUE) + end + end +end diff --git a/lib/rdf/n3/algebra/math/not_less_than.rb b/lib/rdf/n3/algebra/math/not_less_than.rb new file mode 100644 index 0000000..a373e5b --- /dev/null +++ b/lib/rdf/n3/algebra/math/not_less_than.rb @@ -0,0 +1,25 @@ +module RDF::N3::Algebra::Math + ## + # True iff the subject is a string representation of a number which is NOT LESS than a number of which the object is a string representation. + # + # @see https://www.w3.org/TR/xpath-functions/#func-numeric-less-than + class NotLessThan < LessThan + NAME = :mathNotLessThan + URI = RDF::N3::Math.notLessThan + + ## + # Returns TRUE if `term1` is greater than or equal to `term2`. 
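+ # For example (illustrative), applying 5 and 3 yields true, since 5 is not less than 3.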
+ # + # @param [RDF::Term] term1 + # an RDF term + # @param [RDF::Term] term2 + # an RDF term + # @return [RDF::Literal::Boolean] `true` or `false` + # @raise [TypeError] if either operand is not an RDF term or operands are not comparable + # + # @see RDF::Term#== + def apply(term1, term2) + RDF::Literal(super != RDF::Literal::TRUE) + end + end +end diff --git a/lib/rdf/n3/algebra/math/product.rb b/lib/rdf/n3/algebra/math/product.rb index 6634b29..9db6b6d 100644 --- a/lib/rdf/n3/algebra/math/product.rb +++ b/lib/rdf/n3/algebra/math/product.rb @@ -1,9 +1,20 @@ module RDF::N3::Algebra::Math ## # The subject is a list of numbers. The object is calculated as the arithmentic product of those numbers. - class Product < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - + # + # @see https://www.w3.org/TR/xpath-functions/#func-numeric-multiply + class Product < RDF::N3::Algebra::ListOperator NAME = :mathProduct + URI = RDF::N3::Math.product + + ## + # The math:product operator takes a list of strings or numbers and calculates their product. + # + # @param [RDF::N3::List] list + # @return [RDF::Term] + # @see RDF::N3::ListOperator#evaluate + def resolve(list) + list.to_a.map(&:as_number).reduce(&:*) || RDF::Literal(1) # Empty list product is 1 + end end end diff --git a/lib/rdf/n3/algebra/math/quotient.rb b/lib/rdf/n3/algebra/math/quotient.rb index 5050c7e..8fd0c54 100644 --- a/lib/rdf/n3/algebra/math/quotient.rb +++ b/lib/rdf/n3/algebra/math/quotient.rb @@ -1,9 +1,36 @@ module RDF::N3::Algebra::Math ## # The subject is a pair of numbers. The object is calculated by dividing the first number of the pair by the second. - class Quotient < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - + # + # @see https://www.w3.org/TR/xpath-functions/#func-numeric-divide + class Quotient < RDF::N3::Algebra::ListOperator NAME = :mathQuotient + URI = RDF::N3::Math.quotient + + ## + # The math:quotient operator takes a pair of strings or numbers and calculates their quotient. + # + # @param [RDF::N3::List] list + # @return [RDF::Term] + # @see RDF::N3::ListOperator#evaluate + def resolve(list) + list.to_a.map(&:as_number).reduce(&:/) + end + + ## + # The list argument must be a pair of literals. + # + # @param [RDF::N3::List] list + # @return [Boolean] + # @see RDF::N3::ListOperator#validate + def validate(list) + if super && list.all? {|le| le.is_a?(RDF::Literal)} && list.length == 2 + true + else + log_error(NAME) {"list is not a pair of literals: #{list.to_sxp}"} + false + end + end end end diff --git a/lib/rdf/n3/algebra/math/remainder.rb b/lib/rdf/n3/algebra/math/remainder.rb index 4d26d01..7ec6c90 100644 --- a/lib/rdf/n3/algebra/math/remainder.rb +++ b/lib/rdf/n3/algebra/math/remainder.rb @@ -1,9 +1,35 @@ module RDF::N3::Algebra::Math ## # The subject is a pair of integers. The object is calculated by dividing the first number of the pair by the second and taking the remainder. - class Remainder < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - + # + # @see https://www.w3.org/TR/xpath-functions/#func-numeric-mod + class Remainder < RDF::N3::Algebra::ListOperator NAME = :mathRemainder + URI = RDF::N3::Math.remainder + + ## + # The math:remainder operator takes a pair of strings or numbers and calculates their remainder. + # + # @param [RDF::N3::List] list + # @return [RDF::Term] + # @see RDF::N3::ListOperator#evaluate + def resolve(list) + list.to_a.map(&:as_number).reduce(&:%) + end + + ## + # The list argument must be a pair of literals.
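+ # For example (illustrative), (7 3) is accepted and resolves to 1 (7 % 3), while (7 3 2) or (7.5 3) would be rejected.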
+ # + # @param [RDF::N3::List] list + # @return [Boolean] + # @see RDF::N3::ListOperator#validate + def validate(list) + if super && list.all? {|li| li.is_a?(RDF::Literal) && li.as_number.is_a?(RDF::Literal::Integer)} && list.length == 2 + true + else + log_error(NAME) {"list is not a pair of integers: #{list.to_sxp}"} + false + end + end end end diff --git a/lib/rdf/n3/algebra/math/rounded.rb b/lib/rdf/n3/algebra/math/rounded.rb index 1c9ab50..b2f2087 100644 --- a/lib/rdf/n3/algebra/math/rounded.rb +++ b/lib/rdf/n3/algebra/math/rounded.rb @@ -1,9 +1,26 @@ module RDF::N3::Algebra::Math ## # The object is calulated as the subject rounded to the nearest integer. - class Rounded < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - + class Rounded < RDF::N3::Algebra::ResourceOperator NAME = :mathRounded + URI = RDF::N3::Math.rounded + + ## + # The math:rounded operator takes a string or number and rounds it to the nearest integer. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case position + when :subject + return nil unless resource.literal? + as_literal(resource.as_number.round) + when :object + return nil unless resource.literal? || resource.variable? + resource + end + end end end diff --git a/lib/rdf/n3/algebra/math/sin.rb b/lib/rdf/n3/algebra/math/sin.rb new file mode 100644 index 0000000..c7a91df --- /dev/null +++ b/lib/rdf/n3/algebra/math/sin.rb @@ -0,0 +1,40 @@ +module RDF::N3::Algebra::Math + ## + # The subject is an angle expressed in radians. The object is calculated as the sine value of the subject. + # + # @see https://www.w3.org/TR/xpath-functions/#func-math-sin + class Sin < RDF::N3::Algebra::ResourceOperator + NAME = :mathSin + URI = RDF::N3::Math.sin + + ## + # The math:sin operator takes a string or number and calculates its sine. The arc sine of a concrete object can also calculate a variable subject. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case resource + when RDF::Query::Variable then resource + when RDF::Literal + case position + when :subject + as_literal(Math.sin(resource.as_number.object)) + when :object + as_literal(Math.asin(resource.as_number.object)) + end + else + nil + end + end + + ## + # Input is either the subject or object + # + # @return [RDF::Term] + def input_operand + RDF::N3::List.new(values: operands) + end + end +end diff --git a/lib/rdf/n3/algebra/math/sinh.rb b/lib/rdf/n3/algebra/math/sinh.rb new file mode 100644 index 0000000..e58e3a4 --- /dev/null +++ b/lib/rdf/n3/algebra/math/sinh.rb @@ -0,0 +1,38 @@ +module RDF::N3::Algebra::Math + ## + # The subject is an angle expressed in radians. The object is calculated as the hyperbolic sine value of the subject. + class SinH < RDF::N3::Algebra::ResourceOperator + NAME = :mathSinH + URI = RDF::N3::Math.sinh + + ## + # The math:sinh operator takes a string or number and calculates its hyperbolic sine. The inverse hyperbolic sine of a concrete object can also calculate a variable subject.
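+ # For example (illustrative), a subject of 0 resolves to sinh(0) = 0, and a bound object of 0 resolves the subject as asinh(0) = 0.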
+ # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case resource + when RDF::Query::Variable then resource + when RDF::Literal + case position + when :subject + as_literal(Math.sinh(resource.as_number.object)) + when :object + as_literal(Math.asinh(resource.as_number.object)) + end + else + nil + end + end + + ## + # Input is either the subject or object + # + # @return [RDF::Term] + def input_operand + RDF::N3::List.new(values: operands) + end + end +end diff --git a/lib/rdf/n3/algebra/math/sum.rb b/lib/rdf/n3/algebra/math/sum.rb index f8b0fcb..dac1f86 100644 --- a/lib/rdf/n3/algebra/math/sum.rb +++ b/lib/rdf/n3/algebra/math/sum.rb @@ -1,9 +1,40 @@ module RDF::N3::Algebra::Math ## - # The subject is a list of numbers. The object is calculated as the arithmentic sum of those numbers. - class Sum < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - + # **schema**: + # `($a_1 .. $a_n) math:sum $a_s` + # + # **summary**: + # performs addition of numbers + # + # **definition**: + # `true` if and only if the arithmetic sum of `$a_1, .. $a_n` equals `$a_s`. + # Requires either: + # + # 1. all `$a_1, .., $a_n` to be bound; or + # 2. all but one `$a_i` (subject list) to be bound, and `$a_s` to be bound. + # + # **literal domains**: + # + # * `$a_1 .. $a_n` : `xs:decimal` (or its derived types), `xs:float`, or `xs:double` (see note on type promotion, and casting from string) + # * `$a_s`: `xs:decimal` (or its derived types), `xs:float`, or `xs:double` (see note on type promotion, and casting from string) + # + # @example + # { ("3" "5") math:sum ?x } => { ?x :valueOf "3 + 5" } . + # { (3 5) math:sum ?x } => { ?x :valueOf "3 + 5 = 8" } . + # + # @see https://www.w3.org/TR/xpath-functions/#func-numeric-add + class Sum < RDF::N3::Algebra::ListOperator NAME = :mathSum + URI = RDF::N3::Math[:sum] + + ## + # Evaluates to the sum of the list elements + # + # @param [RDF::N3::List] list + # @return [RDF::Term] + # @see RDF::N3::ListOperator#evaluate + def resolve(list) + list.to_a.map(&:as_number).reduce(&:+) || RDF::Literal(0) # Empty list sums to 0 + end end end diff --git a/lib/rdf/n3/algebra/math/tan.rb b/lib/rdf/n3/algebra/math/tan.rb new file mode 100644 index 0000000..4132791 --- /dev/null +++ b/lib/rdf/n3/algebra/math/tan.rb @@ -0,0 +1,40 @@ +module RDF::N3::Algebra::Math + ## + # The subject is an angle expressed in radians. The object is calulated as the tangent value of the subject. + # + # @see https://www.w3.org/TR/xpath-functions/#func-math-tan + class Tan < RDF::N3::Algebra::ResourceOperator + NAME = :mathTan + URI = RDF::N3::Math.tan + + ## + # The math:tan operator takes string or number and calculates its tangent. The arc tangent of a concrete object can also calculate a variable subject. 
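+ # For example (illustrative), a subject of 0 resolves to tan(0) = 0, and a bound object of 1 resolves the subject as atan(1) ≈ 0.7853981633974483.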
+ # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case resource + when RDF::Query::Variable then resource + when RDF::Literal + case position + when :subject + as_literal(Math.tan(resource.as_number.object)) + when :object + as_literal(Math.atan(resource.as_number.object)) + end + else + nil + end + end + + ## + # Input is either the subject or object + # + # @return [RDF::Term] + def input_operand + RDF::N3::List.new(values: operands) + end + end +end diff --git a/lib/rdf/n3/algebra/math/tanh.rb b/lib/rdf/n3/algebra/math/tanh.rb new file mode 100644 index 0000000..8a81073 --- /dev/null +++ b/lib/rdf/n3/algebra/math/tanh.rb @@ -0,0 +1,38 @@ +module RDF::N3::Algebra::Math + ## + # The subject is an angle expressed in radians. The object is calculated as the hyperbolic tangent value of the subject. + class TanH < RDF::N3::Algebra::ResourceOperator + NAME = :mathTanH + URI = RDF::N3::Math.tanh + + ## + # The math:tanh operator takes a string or number and calculates its hyperbolic tangent. The inverse hyperbolic tangent of a concrete object can also calculate a variable subject. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case resource + when RDF::Query::Variable then resource + when RDF::Literal + case position + when :subject + as_literal(Math.tanh(resource.as_number.object)) + when :object + as_literal(Math.atanh(resource.as_number.object)) + end + else + nil + end + end + + ## + # Input is either the subject or object + # + # @return [RDF::Term] + def input_operand + RDF::N3::List.new(values: operands) + end + end +end diff --git a/lib/rdf/n3/algebra/not_implemented.rb b/lib/rdf/n3/algebra/not_implemented.rb new file mode 100644 index 0000000..38bff76 --- /dev/null +++ b/lib/rdf/n3/algebra/not_implemented.rb @@ -0,0 +1,13 @@ +require 'rdf' + +module RDF::N3::Algebra + # + # A placeholder for built-ins that are not yet implemented; instantiating it raises NotImplementedError. + class NotImplemented < SPARQL::Algebra::Operator + include RDF::N3::Algebra::Builtin + + def initialize(*args, predicate:, **options) + raise NotImplementedError, "The #{predicate} operator is not implemented" + end + end +end \ No newline at end of file diff --git a/lib/rdf/n3/algebra/resource_operator.rb b/lib/rdf/n3/algebra/resource_operator.rb new file mode 100644 index 0000000..0d1d3c1 --- /dev/null +++ b/lib/rdf/n3/algebra/resource_operator.rb @@ -0,0 +1,123 @@ +module RDF::N3::Algebra + ## + # This is a generic operator where the subject is a literal or binds to a literal, and the object is either a constant that equals the evaluation of the subject, or a variable to which the result is bound in a solution. + class ResourceOperator < SPARQL::Algebra::Operator::Binary + include SPARQL::Algebra::Query + include SPARQL::Algebra::Update + include RDF::N3::Algebra::Builtin + + NAME = :resourceOperator + + ## + # The operator takes a literal and provides a mechanism for subclasses to operate over (and validate) that argument.
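+ # + # A minimal subclass sketch (illustrative only; URI and registration are omitted): + # + # class Upcase < RDF::N3::Algebra::ResourceOperator + # NAME = :strUpcase + # def resolve(resource, position:) + # resource.literal? ? RDF::Literal(resource.to_s.upcase) : resource + # end + # end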
+ # + # @param [RDF::Queryable] queryable + # the graph or repository to query + # @param [RDF::Query::Solutions] solutions + # solutions for chained queries + # @return [RDF::Query::Solutions] + def execute(queryable, solutions:, **options) + RDF::Query::Solutions(solutions.map do |solution| + subject = operand(0).evaluate(solution.bindings, formulae: formulae) || operand(0) + object = operand(1).evaluate(solution.bindings, formulae: formulae) || operand(1) + subject = formulae.fetch(subject, subject) if subject.node? + object = formulae.fetch(object, object) if object.node? + + log_info(self.class.const_get(:NAME), "subject") {SXP::Generator.string(subject.to_sxp_bin).strip} + log_info(self.class.const_get(:NAME), "object") {SXP::Generator.string(object.to_sxp_bin).strip} + next unless valid?(subject, object) + + lhs = resolve(subject, position: :subject) + if lhs.nil? + log_error(self.class.const_get(:NAME), "subject evaluates to null") {subject.inspect} + next + end + + rhs = resolve(object, position: :object) + if rhs.nil? + log_error(self.class.const_get(:NAME), "object evaluates to null") {object.inspect} + next + end + + if object.variable? + log_debug(self.class.const_get(:NAME), "result") {SXP::Generator.string(lhs.to_sxp_bin).strip} + solution.merge(object.to_sym => lhs) + elsif subject.variable? + log_debug(self.class.const_get(:NAME), "result") {SXP::Generator.string(rhs.to_sxp_bin).strip} + solution.merge(subject.to_sym => rhs) + elsif respond_to?(:apply) + res = apply(lhs, rhs) + log_debug(self.class.const_get(:NAME), "result") {SXP::Generator.string(res.to_sxp_bin).strip} + # Return the result applying subject and object + #require 'byebug'; byebug + case res + when RDF::Literal::TRUE + solution + when RDF::Literal::FALSE + nil + when RDF::Query::Solution + solution.merge(res) + else + log_error(self.class.const_get(:NAME), "unexpected result type") + nil + end + elsif rhs != lhs + log_debug(self.class.const_get(:NAME), "result: false") + nil + else + log_debug(self.class.const_get(:NAME), "result: true") + solution + end + end.compact.uniq) + end + + ## + # Input is generically the subject + # + # @return [RDF::Term] + def input_operand + operand(0) + end + + ## + # Subclasses implement `resolve`. + # + # Returns nil if resource does not validate, given its position + # + # @param [RDF::Term] resource + # @return [RDF::Term] + def resolve(resource, position: :subject) + raise NotImplemented + end + + ## + # Subclasses may override or supplement validate to perform validation on the list subject + # + # @param [RDF::Term] subject + # @param [RDF::Term] object + # @return [Boolean] + def valid?(subject, object) + case subject + when RDF::Query::Variable + object.term? + when RDF::Term + object.term? || object.variable? + else + false + end + end + + ## + # Returns a literal for the numeric argument. + def as_literal(object) + case object + when Float + literal = RDF::Literal(object, canonicalize: true) + literal.instance_variable_set(:@string, literal.to_s.downcase) + literal + else + RDF::Literal(object, canonicalize: true) + end + end + end +end diff --git a/lib/rdf/n3/algebra/str/concatenation.rb b/lib/rdf/n3/algebra/str/concatenation.rb index ac8b500..8f37f91 100644 --- a/lib/rdf/n3/algebra/str/concatenation.rb +++ b/lib/rdf/n3/algebra/str/concatenation.rb @@ -1,9 +1,27 @@ module RDF::N3::Algebra::Str ## # The subject is a list of strings. The object is calculated as a concatenation of those strings. 
- class Concatenation < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - + # + # @example + # ("a" "b") string:concatenation :s + class Concatenation < RDF::N3::Algebra::ListOperator NAME = :strConcatenation + URI = RDF::N3::Str.concatenation + + ## + # The string:concatenation operator takes a list of terms cast to strings and either binds the result of concatenating them to the output variable, or removes a solution whose object does not equal the result. + # + # List entries are stringified using {SPARQL::Algebra::Expression.cast}. + # + # @param [RDF::N3::List] list + # @return [RDF::Term] + # @see RDF::N3::ListOperator#evaluate + def resolve(list) + RDF::Literal( + list.to_a.map do |o| + SPARQL::Algebra::Expression.cast(RDF::XSD.string, o) + end.join("") + ) + end end end diff --git a/lib/rdf/n3/algebra/str/contains.rb b/lib/rdf/n3/algebra/str/contains.rb index 6f38c1e..1df7a7e 100644 --- a/lib/rdf/n3/algebra/str/contains.rb +++ b/lib/rdf/n3/algebra/str/contains.rb @@ -1,9 +1,33 @@ module RDF::N3::Algebra::Str - ## # True iff the subject string contains the object string. - class Contains < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - + class Contains < RDF::N3::Algebra::ResourceOperator NAME = :strContains + URI = RDF::N3::Str.contains + + ## + # Resolves inputs as strings. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Literal] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + resource if resource.term? + end + + # Neither subject nor object is considered an input, and both must be resolved before evaluation. + def input_operand + RDF::N3::List.new + end + + ## + # @param [String] left + # a literal + # @param [String] right + # a literal + # @return [RDF::Literal::Boolean] + def apply(left, right) + RDF::Literal(left.to_s.include?(right.to_s)) + end end end diff --git a/lib/rdf/n3/algebra/str/containsIgnoringCase.rb b/lib/rdf/n3/algebra/str/containsIgnoringCase.rb deleted file mode 100644 index 01c00ce..0000000 --- a/lib/rdf/n3/algebra/str/containsIgnoringCase.rb +++ /dev/null @@ -1,9 +0,0 @@ -module RDF::N3::Algebra::Str - ## - # True iff the subject string contains the object string, with the comparison done ignoring the difference between upper case and lower case characters. - class ContainsIgnoringCase < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - - NAME = :strContainsIgnoringCase - end -end diff --git a/lib/rdf/n3/algebra/str/contains_ignoring_case.rb b/lib/rdf/n3/algebra/str/contains_ignoring_case.rb new file mode 100644 index 0000000..430e0e0 --- /dev/null +++ b/lib/rdf/n3/algebra/str/contains_ignoring_case.rb @@ -0,0 +1,33 @@ +module RDF::N3::Algebra::Str + # True iff the subject string contains the object string, with the comparison done ignoring the difference between upper case and lower case characters. + class ContainsIgnoringCase < RDF::N3::Algebra::ResourceOperator + NAME = :strContainsIgnoringCase + URI = RDF::N3::Str.containsIgnoringCase + + ## + # Resolves inputs as lower-case strings. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Literal] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + RDF::Literal(resource.to_s.downcase) if resource.term? + end + + # Both subject and object are inputs.
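+ # For example (illustrative), "Hello World" string:containsIgnoringCase "WORLD" holds, since both terms are compared in lower case.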
+ def input_operand + RDF::N3::List.new(values: operands) + end + + ## + # @param [String] left + # a literal + # @param [String] right + # a literal + # @return [RDF::Literal::Boolean] + def apply(left, right) + RDF::Literal(left.to_s.include?(right.to_s)) + end + end +end diff --git a/lib/rdf/n3/algebra/str/endsWith.rb b/lib/rdf/n3/algebra/str/endsWith.rb deleted file mode 100644 index f7ec877..0000000 --- a/lib/rdf/n3/algebra/str/endsWith.rb +++ /dev/null @@ -1,9 +0,0 @@ -module RDF::N3::Algebra::Str - ## - # True iff the subject string ends with the object string. - class EndsWith < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - - NAME = :strEndsWith - end -end diff --git a/lib/rdf/n3/algebra/str/ends_with.rb b/lib/rdf/n3/algebra/str/ends_with.rb new file mode 100644 index 0000000..16a6a10 --- /dev/null +++ b/lib/rdf/n3/algebra/str/ends_with.rb @@ -0,0 +1,33 @@ +module RDF::N3::Algebra::Str + # True iff the subject string ends with the object string. + class EndsWith < RDF::N3::Algebra::ResourceOperator + NAME = :strEndsWith + URI = RDF::N3::Str.endsWith + + ## + # Resolves inputs as strings. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Literal] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + resource if resource.term? + end + + # Both subject and object are inputs. + def input_operand + RDF::N3::List.new(values: operands) + end + + ## + # @param [RDF::Literal] left + # a literal + # @param [RDF::Literal] right + # a literal + # @return [RDF::Literal::Boolean] + def apply(left, right) + RDF::Literal(left.to_s.end_with?(right.to_s)) + end + end +end diff --git a/lib/rdf/n3/algebra/str/equalIgnoringCase.rb b/lib/rdf/n3/algebra/str/equalIgnoringCase.rb deleted file mode 100644 index fe51875..0000000 --- a/lib/rdf/n3/algebra/str/equalIgnoringCase.rb +++ /dev/null @@ -1,9 +0,0 @@ -module RDF::N3::Algebra::Str - ## - # True iff the subject string is the same as object string ignoring differences between upper and lower case. - class EqualIgnoringCase < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - - NAME = :strEqualIgnoringCase - end -end diff --git a/lib/rdf/n3/algebra/str/equal_ignoring_case.rb b/lib/rdf/n3/algebra/str/equal_ignoring_case.rb new file mode 100644 index 0000000..865f235 --- /dev/null +++ b/lib/rdf/n3/algebra/str/equal_ignoring_case.rb @@ -0,0 +1,34 @@ +module RDF::N3::Algebra::Str + class EqualIgnoringCase < RDF::N3::Algebra::ResourceOperator + NAME = :strEqualIgnoringCase + URI = RDF::N3::Str.equalIgnoringCase + + ## + # Resolves inputs as lower-case strings. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Literal] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + RDF::Literal(resource.to_s.downcase) if resource.term? + end + + # Both subject and object are inputs. + def input_operand + RDF::N3::List.new(values: operands) + end + + ## + # True iff the subject string is the same as object string ignoring differences between upper and lower case. 
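+ # For example (illustrative), "n3" string:equalIgnoringCase "N3" holds, while "n3" string:equalIgnoringCase "n4" does not.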
+ # + # @param [RDF::Literal] left + # a literal + # @param [RDF::Literal] right + # a literal + # @return [RDF::Literal::Boolean] + def apply(left, right) + RDF::Literal(left.to_s == right.to_s) + end + end +end diff --git a/lib/rdf/n3/algebra/str/format.rb b/lib/rdf/n3/algebra/str/format.rb index 61724bf..f3b493d 100644 --- a/lib/rdf/n3/algebra/str/format.rb +++ b/lib/rdf/n3/algebra/str/format.rb @@ -1,9 +1,17 @@ module RDF::N3::Algebra::Str - ## # The subject is a list, whose first member is a format string, and whose remaining members are arguments to the format string. The formating string is in the style of python's % operator, very similar to C's sprintf(). The object is calculated from the subject. - class Format < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - + class Format < RDF::N3::Algebra::ListOperator + include RDF::N3::Algebra::Builtin NAME = :strFormat + URI = RDF::N3::Str.format + + ## + # @param [RDF::N3::List] list + # @return [RDF::Term] + # @see RDF::N3::ListOperator#evaluate + def resolve(list) + format, *args = list.to_a.map(&:value) + str = RDF::Literal(format % args) + end end end diff --git a/lib/rdf/n3/algebra/str/greaterThan.rb b/lib/rdf/n3/algebra/str/greaterThan.rb deleted file mode 100644 index 488606a..0000000 --- a/lib/rdf/n3/algebra/str/greaterThan.rb +++ /dev/null @@ -1,9 +0,0 @@ -module RDF::N3::Algebra::Str - ## - # True iff the string is greater than the object when ordered according to Unicode(tm) code order - class GreaterThan < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - - NAME = :strGreaterThan - end -end diff --git a/lib/rdf/n3/algebra/str/greater_than.rb b/lib/rdf/n3/algebra/str/greater_than.rb new file mode 100644 index 0000000..f535847 --- /dev/null +++ b/lib/rdf/n3/algebra/str/greater_than.rb @@ -0,0 +1,38 @@ +module RDF::N3::Algebra::Str + # True iff the string is greater than the object when ordered according to Unicode(tm) code order. + class GreaterThan < RDF::N3::Algebra::ResourceOperator + NAME = :strGreaterThan + URI = RDF::N3::Str.greaterThan + + ## + # The string:greaterThan compares subject with object as strings. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + resource if resource.literal? + end + + # Both subject and object are inputs. + def input_operand + RDF::N3::List.new(values: operands) + end + + ## + # @param [RDF::Literal] left + # a literal + # @param [RDF::Literal] right + # a literal + # @return [RDF::Literal::Boolean] + def apply(left, right) + case + when !left.is_a?(RDF::Term) || !right.is_a?(RDF::Term) || !left.compatible?(right) + log_error(NAME) {"expected two RDF::Literal operands, but got #{left.inspect} and #{right.inspect}"} + when left > right then RDF::Literal::TRUE + else RDF::Literal::FALSE + end + end + end +end diff --git a/lib/rdf/n3/algebra/str/lessThan.rb b/lib/rdf/n3/algebra/str/lessThan.rb deleted file mode 100644 index fabbb90..0000000 --- a/lib/rdf/n3/algebra/str/lessThan.rb +++ /dev/null @@ -1,9 +0,0 @@ -module RDF::N3::Algebra::Str - ## - # True iff the string is less than the object when ordered according to Unicode(tm) code order. 
- class LessThan < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - - NAME = :strLessThan - end -end diff --git a/lib/rdf/n3/algebra/str/less_than.rb b/lib/rdf/n3/algebra/str/less_than.rb new file mode 100644 index 0000000..f821f88 --- /dev/null +++ b/lib/rdf/n3/algebra/str/less_than.rb @@ -0,0 +1,33 @@ +module RDF::N3::Algebra::Str + # True iff the string is less than the object when ordered according to Unicode(tm) code order. + class LessThan < RDF::N3::Algebra::ResourceOperator + NAME = :strLessThan + URI = RDF::N3::Str.lessThan + + ## + # Resolves inputs as strings. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Literal] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + resource if resource.term? + end + + # Both subject and object are inputs. + def input_operand + RDF::N3::List.new(values: operands) + end + + ## + # @param [RDF::Literal] left + # a literal + # @param [RDF::Literal] right + # a literal + # @return [RDF::Literal::Boolean] + def apply(left, right) + RDF::Literal(left.to_s < right.to_s) + end + end +end diff --git a/lib/rdf/n3/algebra/str/matches.rb b/lib/rdf/n3/algebra/str/matches.rb index cc5b6e7..28f136b 100644 --- a/lib/rdf/n3/algebra/str/matches.rb +++ b/lib/rdf/n3/algebra/str/matches.rb @@ -1,9 +1,37 @@ module RDF::N3::Algebra::Str - ## - # The subject is a string; the object is is a regular expression in the perl, python style. It is true iff the string matches the regexp. - class Matches < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - + # The subject is a string; the object is is a regular expression in the perl, python style. + # It is true iff the string matches the regexp. + class Matches < RDF::N3::Algebra::ResourceOperator NAME = :strMatches + URI = RDF::N3::Str.matches + + ## + # Resolves inputs as strings. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Literal] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + resource if resource.literal? + end + + # Neither subect nor object are considered inputs, and must be resolved before evaluation. + def input_operand + RDF::N3::List.new + end + + ## + # Tre if right, treated as a regular expression, matches left + # + # @param [RDF::Literal] left + # a simple literal + # @param [RDF::Literal] right + # a simple literal + # @return [RDF::Literal::Boolean] `true` or `false` + # @see https://www.w3.org/TR/xpath-functions/#regex-syntax + def apply(left, right) + RDF::Literal(Regexp.new(right.to_s).match?(left.to_s)) + end end end diff --git a/lib/rdf/n3/algebra/str/notEqualIgnoringCase.rb b/lib/rdf/n3/algebra/str/notEqualIgnoringCase.rb deleted file mode 100644 index 7795f6c..0000000 --- a/lib/rdf/n3/algebra/str/notEqualIgnoringCase.rb +++ /dev/null @@ -1,9 +0,0 @@ -module RDF::N3::Algebra::Str - ## - # True iff the subject string is the NOT same as object string ignoring differences between upper and lower case. - class NotEqualIgnoringCase < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - - NAME = :strNotEqualIgnoringCase - end -end diff --git a/lib/rdf/n3/algebra/str/notGreaterThan.rb b/lib/rdf/n3/algebra/str/notGreaterThan.rb deleted file mode 100644 index fba338c..0000000 --- a/lib/rdf/n3/algebra/str/notGreaterThan.rb +++ /dev/null @@ -1,9 +0,0 @@ -module RDF::N3::Algebra::Str - ## - # True iff the string is NOT greater than the object when ordered according to Unicode(tm) code order. 
- class NotGreaterThan < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - - NAME = :strNotGreaterThan - end -end diff --git a/lib/rdf/n3/algebra/str/notLessThan.rb b/lib/rdf/n3/algebra/str/notLessThan.rb deleted file mode 100644 index 81ca196..0000000 --- a/lib/rdf/n3/algebra/str/notLessThan.rb +++ /dev/null @@ -1,9 +0,0 @@ -module RDF::N3::Algebra::Str - ## - # True iff the string is NOT less than the object when ordered according to Unicode(tm) code order. - class NotLessThan < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - - NAME = :strNotLessThan - end -end diff --git a/lib/rdf/n3/algebra/str/notMatches.rb b/lib/rdf/n3/algebra/str/notMatches.rb deleted file mode 100644 index 64ca466..0000000 --- a/lib/rdf/n3/algebra/str/notMatches.rb +++ /dev/null @@ -1,9 +0,0 @@ -module RDF::N3::Algebra::Str - ## - # The subject string; the object is is a regular expression in the perl, python style. It is true iff the string does NOT match the regexp. - class NotMatches < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - - NAME = :strNotMatches - end -end diff --git a/lib/rdf/n3/algebra/str/not_equal_ignoring_case.rb b/lib/rdf/n3/algebra/str/not_equal_ignoring_case.rb new file mode 100644 index 0000000..d2652bc --- /dev/null +++ b/lib/rdf/n3/algebra/str/not_equal_ignoring_case.rb @@ -0,0 +1,17 @@ +module RDF::N3::Algebra::Str + # True iff the subject string is the NOT same as object string ignoring differences between upper and lower case. + class NotEqualIgnoringCase < EqualIgnoringCase + NAME = :strNotEqualIgnoringCase + URI = RDF::N3::Str.notEqualIgnoringCase + + ## + # @param [RDF::Literal] left + # a literal + # @param [RDF::Literal] right + # a literal + # @return [RDF::Literal::Boolean] + def apply(left, right) + RDF::Literal(super != RDF::Literal::TRUE) + end + end +end diff --git a/lib/rdf/n3/algebra/str/not_greater_than.rb b/lib/rdf/n3/algebra/str/not_greater_than.rb new file mode 100644 index 0000000..04d8f86 --- /dev/null +++ b/lib/rdf/n3/algebra/str/not_greater_than.rb @@ -0,0 +1,17 @@ +module RDF::N3::Algebra::Str + # True iff the string is NOT greater than the object when ordered according to Unicode(tm) code order. + class NotGreaterThan < GreaterThan + NAME = :strNotGreaterThan + URI = RDF::N3::Str.notGreaterThan + + ## + # @param [RDF::Literal] left + # a literal + # @param [RDF::Literal] right + # a literal + # @return [RDF::Literal::Boolean] + def apply(left, right) + RDF::Literal(super != RDF::Literal::TRUE) + end + end +end diff --git a/lib/rdf/n3/algebra/str/not_less_than.rb b/lib/rdf/n3/algebra/str/not_less_than.rb new file mode 100644 index 0000000..e09db2b --- /dev/null +++ b/lib/rdf/n3/algebra/str/not_less_than.rb @@ -0,0 +1,17 @@ +module RDF::N3::Algebra::Str + # True iff the string is NOT less than the object when ordered according to Unicode(tm) code order. + class NotLessThan < LessThan + NAME = :strNotLessThan + URI = RDF::N3::Str.notLessThan + + ## + # @param [RDF::Literal] left + # a literal + # @param [RDF::Literal] right + # a literal + # @return [RDF::Literal::Boolean] + def apply(left, right) + RDF::Literal(super != RDF::Literal::TRUE) + end + end +end diff --git a/lib/rdf/n3/algebra/str/not_matches.rb b/lib/rdf/n3/algebra/str/not_matches.rb new file mode 100644 index 0000000..41d6d01 --- /dev/null +++ b/lib/rdf/n3/algebra/str/not_matches.rb @@ -0,0 +1,18 @@ +module RDF::N3::Algebra::Str + # The subject string; the object is a regular expression in the perl, python style. 
It is true iff the string does NOT match the regexp. + class NotMatches < Matches + NAME = :strNotMatches + URI = RDF::N3::Str.notMatches + + ## + # @param [RDF::Literal] text + # a simple literal + # @param [RDF::Literal] pattern + # a simple literal + # @return [RDF::Literal::Boolean] `true` or `false` + # @see https://www.w3.org/TR/xpath-functions/#regex-syntax + def apply(text, pattern) + RDF::Literal(super != RDF::Literal::TRUE) + end + end +end diff --git a/lib/rdf/n3/algebra/str/replace.rb b/lib/rdf/n3/algebra/str/replace.rb index bc2ec47..fe8e619 100644 --- a/lib/rdf/n3/algebra/str/replace.rb +++ b/lib/rdf/n3/algebra/str/replace.rb @@ -1,12 +1,35 @@ module RDF::N3::Algebra::Str - ## - # A built-in for replacing characters or sub. takes a list of 3 strings; the first is the input data, the second the old and the third the new string. The object is calculated as the rplaced string. + # A built-in for replacing characters or sub. takes a list of 3 strings; the first is the input data, the second the old and the third the new string. The object is calculated as the replaced string. # # @example # ("fofof bar", "of", "baz") string:replace "fbazbaz bar" - class Replace < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - + class Replace < RDF::N3::Algebra::ListOperator + include RDF::N3::Algebra::Builtin NAME = :strReplace + URI = RDF::N3::Str.replace + + ## + # @param [RDF::N3::List] list + # @return [RDF::Term] + # @see RDF::N3::ListOperator#evaluate + def resolve(list) + format, *args = list.to_a.map(&:value) + input, old_str, new_str = list.to_a + RDF::Literal(input.to_s.gsub(old_str.to_s, new_str.to_s)) + end + + ## + # Subclasses may override or supplement validate to perform validation on the list subject + # + # @param [RDF::N3::List] list + # @return [Boolean] + def validate(list) + if super && list.length == 3 && list.to_a.all?(&:literal?) + true + else + log_error(NAME) {"list must have exactly three entries: #{list.to_sxp}"} + false + end + end end end diff --git a/lib/rdf/n3/algebra/str/scrape.rb b/lib/rdf/n3/algebra/str/scrape.rb index a941651..53e8227 100644 --- a/lib/rdf/n3/algebra/str/scrape.rb +++ b/lib/rdf/n3/algebra/str/scrape.rb @@ -1,9 +1,35 @@ module RDF::N3::Algebra::Str - ## - # The subject is a list of two strings. The second string is a regular expression in the perl, python style. It must contain one group (a part in parentheses). If the first string in the list matches the regular expression, then the object is calculated as being thepart of the first string which matches the group. - class Scrape < SPARQL::Algebra::Operator::Binary - include RDF::Util::Logger - + # The subject is a list of two strings. The second string is a regular expression in the perl, python style. It must contain one group (a part in parentheses). If the first string in the list matches the regular expression, then the object is calculated as being the part of the first string which matches the group. 
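+ # If the regular expression does not match, #resolve returns nil and no object value is produced.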
+ # + # @example + # ("abcdef" "ab(..)ef") string:scrape "cd" + class Scrape < RDF::N3::Algebra::ListOperator + include RDF::N3::Algebra::Builtin NAME = :strScrape + URI = RDF::N3::Str.scrape + + ## + # @param [RDF::N3::List] list + # @return [RDF::Term] + # @see RDF::N3::ListOperator#evaluate + def resolve(list) + input, regex = list.to_a + md = Regexp.new(regex.to_s).match(input.to_s) + RDF::Literal(md[1]) if md + end + + ## + # Subclasses may override or supplement validate to perform validation on the list subject + # + # @param [RDF::N3::List] list + # @return [Boolean] + def validate(list) + if super && list.length == 2 + true + else + log_error(NAME) {"list must have exactly two entries: #{list.to_sxp}"} + false + end + end end end diff --git a/lib/rdf/n3/algebra/str/startsWith.rb b/lib/rdf/n3/algebra/str/startsWith.rb deleted file mode 100644 index cda654c..0000000 --- a/lib/rdf/n3/algebra/str/startsWith.rb +++ /dev/null @@ -1,56 +0,0 @@ -module RDF::N3::Algebra::Str - ## - # True iff the subject string starts with the object string. - class StartsWith < SPARQL::Algebra::Operator::Binary - include SPARQL::Algebra::Query - include SPARQL::Algebra::Update - include RDF::Enumerable - include RDF::Util::Logger - - NAME = :strStartsWith - - ## - # The string:startsWith operator corresponds to the XPath fn:starts-with function. The arguments must be argument compatible otherwise an error is raised. - # - # For constant inputs that evaulate to true, the original solutions are returned. - # - # For constant inputs that evaluate to false, the empty solution set is returned. XXX - # - # Otherwise, for variable operands, it binds matching variables to the solution set. - # - # @param [RDF::Queryable] queryable - # @param [RDF::Query::Solutions] solutions - # @return [RDF::Query::Solutions] - # @raise [TypeError] if operands are not compatible - def execute(queryable, solutions:, **options) - log_debug {"strStartsWith #{operands.to_sxp}"} - @solutions = solutions.filter do |solution| - left, right = operands.map {|op| op.evaluate(solution.bindings)} - if !left.compatible?(right) - log_debug {"(strStartsWith incompatible operands #{[left, right].to_sxp})"} - false - elsif !left.to_s.start_with?(right.to_s) - log_debug {"(strStartsWith false #{[left, right].to_sxp})"} - false - else - log_debug {"(strStartsWith true #{[left, right].to_sxp})"} - true - end - end - end - - ## - # Does not yield statements. - # - # @yield [statement] - # each matching statement - # @yieldparam [RDF::Statement] solution - # @yieldreturn [void] ignored - def each(&block) - end - - # Graph name associated with this operation, using the name of the parent - # @return [RDF::Resource] - def graph_name; parent.graph_name; end - end -end diff --git a/lib/rdf/n3/algebra/str/starts_with.rb b/lib/rdf/n3/algebra/str/starts_with.rb new file mode 100644 index 0000000..41bfd87 --- /dev/null +++ b/lib/rdf/n3/algebra/str/starts_with.rb @@ -0,0 +1,33 @@ +module RDF::N3::Algebra::Str + # True iff the subject string starts with the object string. + class StartsWith < RDF::N3::Algebra::ResourceOperator + NAME = :strStartsWith + URI = RDF::N3::Str.startsWith + + ## + # Resolves inputs as strings. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Literal] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + resource if resource.term? + end + + # Both subject and object are inputs. 
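+ # For example (illustrative), "apple" string:startsWith "app" is expected to hold.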
+ def input_operand + RDF::N3::List.new(values: operands) + end + + ## + # @param [RDF::Literal] left + # a literal + # @param [RDF::Literal] right + # a literal + # @return [RDF::Literal::Boolean] + def apply(left, right) + RDF::Literal(left.to_s.start_with?(right.to_s)) + end + end +end diff --git a/lib/rdf/n3/algebra/time/day.rb b/lib/rdf/n3/algebra/time/day.rb new file mode 100644 index 0000000..4e15591 --- /dev/null +++ b/lib/rdf/n3/algebra/time/day.rb @@ -0,0 +1,35 @@ +module RDF::N3::Algebra::Time + ## + # For a date-time, its time:day is the day of the month. + # + # @see https://www.w3.org/TR/xpath-functions/#func-day-from-dateTime + class Day < RDF::N3::Algebra::ResourceOperator + NAME = :timeDay + URI = RDF::N3::Time.day + + ## + # The time:day operator takes string or dateTime and extracts the day component. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case position + when :subject + return nil unless resource.literal? + resource = resource.as_datetime + RDF::Literal(resource.object.strftime("%d").to_i) + when :object + return nil unless resource.literal? || resource.variable? + resource + end + end + + ## + # There is no day unless it was specified in the lexical form + def valid?(subject, object) + subject.value.match?(%r(^\d{4}-\d{2}-\d{2})) + end + end +end diff --git a/lib/rdf/n3/algebra/time/day_of_week.rb b/lib/rdf/n3/algebra/time/day_of_week.rb new file mode 100644 index 0000000..9eda297 --- /dev/null +++ b/lib/rdf/n3/algebra/time/day_of_week.rb @@ -0,0 +1,27 @@ +module RDF::N3::Algebra::Time + ## + # For a date-time, its time:dayOfWeek is the the day number within the week, Sunday being 0. + class DayOfWeek < RDF::N3::Algebra::ResourceOperator + NAME = :timeDayOfWeek + URI = RDF::N3::Time.dayOfWeek + + ## + # The time:dayOfWeek operator takes string or dateTime and returns the 0-based day of the week. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case position + when :subject + return nil unless resource.literal? + resource = resource.as_datetime + RDF::Literal(resource.object.strftime("%w").to_i) + when :object + return nil unless resource.literal? || resource.variable? + resource + end + end + end +end diff --git a/lib/rdf/n3/algebra/time/gm_time.rb b/lib/rdf/n3/algebra/time/gm_time.rb new file mode 100644 index 0000000..9b170ef --- /dev/null +++ b/lib/rdf/n3/algebra/time/gm_time.rb @@ -0,0 +1,29 @@ +module RDF::N3::Algebra::Time + ## + # For a date-time format string, its time:gmtime is the result of formatting the Universal Time of processing in the format given. If the format string has zero length, then the ISOdate standard format is used. `[ is time:gmtime of ""]` the therefore the current date time. It will end with "Z" as a timezone code. + # + # @see https://www.w3.org/TR/xpath-functions/#func-current-dateTime + class GmTime < RDF::N3::Algebra::ResourceOperator + NAME = :timeGmTime + URI = RDF::N3::Time.gmTime + + ## + # The time:gmTime operator takes string or dateTime and returns current time formatted according to the subject. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case position + when :subject + return nil unless resource.literal? 
+ resource = "%FT%T%:z" if resource.to_s.empty? + RDF::Literal(DateTime.now.new_offset(0).strftime(resource.to_s)) + when :object + return nil unless resource.literal? || resource.variable? + resource + end + end + end +end diff --git a/lib/rdf/n3/algebra/time/hour.rb b/lib/rdf/n3/algebra/time/hour.rb new file mode 100644 index 0000000..1a36e8c --- /dev/null +++ b/lib/rdf/n3/algebra/time/hour.rb @@ -0,0 +1,35 @@ +module RDF::N3::Algebra::Time + ## + # For a date-time, its time:hour is the hour in the 24 hour clock. + # + # @see https://www.w3.org/TR/xpath-functions/#func-hours-from-dateTime + class Hour < RDF::N3::Algebra::ResourceOperator + NAME = :timeHour + URI = RDF::N3::Time.hour + + ## + # The time:hour operator takes string or dateTime and extracts the hour component. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case position + when :subject + return nil unless resource.literal? + resource = resource.as_datetime + RDF::Literal(resource.object.strftime("%H").to_i) + when :object + return nil unless resource.literal? || resource.variable? + resource + end + end + + ## + # There is no hour unless it was specified in the lexical form + def valid?(subject, object) + subject.value.match?(%r(^\d{4}-\d{2}-\d{2}T\d{2})) + end + end +end diff --git a/lib/rdf/n3/algebra/time/in_seconds.rb b/lib/rdf/n3/algebra/time/in_seconds.rb new file mode 100644 index 0000000..8fc346b --- /dev/null +++ b/lib/rdf/n3/algebra/time/in_seconds.rb @@ -0,0 +1,59 @@ +module RDF::N3::Algebra::Time + ## + # Iff the _subject_ is a `xsd:dateTime` and the _object_ is the integer number of seconds since the beginning of the era on a given system. Don't assume a particular value, always test for it. The _object_ can be calculated as a function of the _subject_. + # + # @see https://www.w3.org/TR/xpath-functions/#func-timezone-from-dateTime + class InSeconds < RDF::N3::Algebra::ResourceOperator + NAME = :timeInSeconds + URI = RDF::N3::Time.inSeconds + + ## + # The time:inseconds operator takes may have either a bound subject or object. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case position + when :subject + case resource + when RDF::Query::Variable + resource + when RDF::Literal + resource = resource.as_datetime + # Subject evaluates to seconds from the epoc + RDF::Literal::Integer.new(resource.object.strftime("%s")) + else + nil + end + when :object + case resource + when RDF::Query::Variable + resource + when RDF::Literal + resource = resource.as_number + # Object evaluates to the DateTime representation of the seconds form the epoc + RDF::Literal(RDF::Literal::DateTime.new(::Time.at(resource).utc.to_datetime).to_s) + else + nil + end + end + end + + # Either subject or object must be a bound resource + def valid?(subject, object) + return true if subject.literal? || object.literal? + log_error(NAME) {"subject or object are not literals: #{subject.inspect}, #{object.inspect}"} + false + end + + ## + # Return both subject and object operands. 
+ # + # @return [RDF::Term] + def input_operand + RDF::N3::List.new(values: operands) + end + end +end diff --git a/lib/rdf/n3/algebra/time/local_time.rb b/lib/rdf/n3/algebra/time/local_time.rb new file mode 100644 index 0000000..4a28e41 --- /dev/null +++ b/lib/rdf/n3/algebra/time/local_time.rb @@ -0,0 +1,29 @@ +module RDF::N3::Algebra::Time + ## + # For a date-time format string, its time:localTime is the result of formatting the current time of processing and local timezone in the format given. If the format string has zero length, then the ISOdate standrad format is used. [ is time:localTime of ""] the therefore the current date time. It will end with a numeric timezone code or "Z" for UTC (GMT). + # + # @see https://www.w3.org/TR/xpath-functions/#func-current-dateTime + class LocalTime < RDF::N3::Algebra::ResourceOperator + NAME = :timeLocalTime + URI = RDF::N3::Time.localTime + + ## + # The time:localTime operator takes string or dateTime and returns current time formatted according to the subject. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case position + when :subject + return nil unless resource.literal? + resource = "%FT%T%:z" if resource.to_s.empty? + RDF::Literal(DateTime.now.strftime(resource.to_s)) + when :object + return nil unless resource.literal? || resource.variable? + resource + end + end + end +end diff --git a/lib/rdf/n3/algebra/time/minute.rb b/lib/rdf/n3/algebra/time/minute.rb new file mode 100644 index 0000000..02dc5cf --- /dev/null +++ b/lib/rdf/n3/algebra/time/minute.rb @@ -0,0 +1,35 @@ +module RDF::N3::Algebra::Time + ## + # For a date-time, its time:minute is the minutes component. + # + # @see https://www.w3.org/TR/xpath-functions/#func-minutes-from-dateTime + class Minute < RDF::N3::Algebra::ResourceOperator + NAME = :timeMinute + URI = RDF::N3::Time.minute + + ## + # The time:minute operator takes string or dateTime and extracts the minute component. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case position + when :subject + return nil unless resource.literal? + resource = resource.as_datetime + RDF::Literal(resource.object.strftime("%M").to_i) + when :object + return nil unless resource.literal? || resource.variable? + resource + end + end + + ## + # There is no minute unless it was specified in the lexical form + def valid?(subject, object) + subject.value.match?(%r(^\d{4}-\d{2}-\d{2}T\d{2}:\d{2})) + end + end +end diff --git a/lib/rdf/n3/algebra/time/month.rb b/lib/rdf/n3/algebra/time/month.rb new file mode 100644 index 0000000..222fa54 --- /dev/null +++ b/lib/rdf/n3/algebra/time/month.rb @@ -0,0 +1,35 @@ +module RDF::N3::Algebra::Time + ## + # For a date-time, its time:month is the two-digit month. + # + # @see https://www.w3.org/TR/xpath-functions/#func-month-from-dateTime + class Month < RDF::N3::Algebra::ResourceOperator + NAME = :timeMonth + URI = RDF::N3::Time.month + + ## + # The time:month operator takes string or dateTime and extracts the month component. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case position + when :subject + return nil unless resource.literal? 
+ resource = resource.as_datetime + RDF::Literal(resource.object.strftime("%m").to_i) + when :object + return nil unless resource.literal? || resource.variable? + resource + end + end + + ## + # There is no month unless it was specified in the lexical form + def valid?(subject, object) + subject.value.match?(%r(^\d{4}-\d{2})) + end + end +end diff --git a/lib/rdf/n3/algebra/time/second.rb b/lib/rdf/n3/algebra/time/second.rb new file mode 100644 index 0000000..db1a4b1 --- /dev/null +++ b/lib/rdf/n3/algebra/time/second.rb @@ -0,0 +1,35 @@ +module RDF::N3::Algebra::Time + ## + # For a date-time, its time:second is the seconds component. + # + # @see https://www.w3.org/TR/xpath-functions/#func-seconds-from-dateTime + class Second < RDF::N3::Algebra::ResourceOperator + NAME = :timeSecond + URI = RDF::N3::Time.second + + ## + # The time:second operator takes string or dateTime and extracts the seconds component. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case position + when :subject + return nil unless resource.literal? + resource = resource.as_datetime + RDF::Literal(resource.object.strftime("%S").to_i) + when :object + return nil unless resource.literal? || resource.variable? + resource + end + end + + ## + # There is no second unless it was specified in the lexical form + def valid?(subject, object) + subject.value.match?(%r(^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2})) + end + end +end diff --git a/lib/rdf/n3/algebra/time/timezone.rb b/lib/rdf/n3/algebra/time/timezone.rb new file mode 100644 index 0000000..f1de423 --- /dev/null +++ b/lib/rdf/n3/algebra/time/timezone.rb @@ -0,0 +1,36 @@ +module RDF::N3::Algebra::Time + ## + # For a date-time, its time:timeZone is the trailing timezone offset part, e.g. "-05:00". + # + # @see https://www.w3.org/TR/xpath-functions/#func-timezone-from-dateTime + class Timezone < RDF::N3::Algebra::ResourceOperator + NAME = :timeTimezone + URI = RDF::N3::Time.timeZone + + ## + # The time:timeZone operator takes string or dateTime and extracts the timeZone component. + # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case position + when :subject + return nil unless resource.literal? + resource = resource.as_datetime + RDF::Literal(resource.object.strftime("%Z")) + when :object + return nil unless resource.literal? || resource.variable? + resource + end + end + + ## + # There is no timezone unless it was specified in the lexical form and is not "Z" + def valid?(subject, object) + md = subject.value.match(%r(^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(Z|[\+-][\d-]+))) + md && md[1].to_s != 'Z' + end + end +end diff --git a/lib/rdf/n3/algebra/time/year.rb b/lib/rdf/n3/algebra/time/year.rb new file mode 100644 index 0000000..58832bd --- /dev/null +++ b/lib/rdf/n3/algebra/time/year.rb @@ -0,0 +1,29 @@ +module RDF::N3::Algebra::Time + ## + # For a date-time, its time:year is the year component. + # + # @see https://www.w3.org/TR/xpath-functions/#func-year-from-dateTime + class Year < RDF::N3::Algebra::ResourceOperator + NAME = :timeYear + URI = RDF::N3::Time.year + + ## + # The time:year operator takes string or dateTime and extracts the year component. 
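+ # For example (illustrative), a subject of "2002-02-22T22:22:22Z" resolves to 2002.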
+ # + # @param [RDF::Term] resource + # @param [:subject, :object] position + # @return [RDF::Term] + # @see RDF::N3::ResourceOperator#evaluate + def resolve(resource, position:) + case position + when :subject + return nil unless resource.literal? + resource = resource.as_datetime + RDF::Literal(resource.object.strftime("%Y").to_i) + when :object + return nil unless resource.literal? || resource.variable? + resource + end + end + end +end diff --git a/lib/rdf/n3/extensions.rb b/lib/rdf/n3/extensions.rb index da17e85..c4fb176 100644 --- a/lib/rdf/n3/extensions.rb +++ b/lib/rdf/n3/extensions.rb @@ -1,5 +1,6 @@ # frozen_string_literal: true require 'rdf' +require 'rdf/n3/terminals' # Monkey-patch RDF::Enumerable to add `:existentials` and `:univerals` accessors module RDF @@ -11,27 +12,21 @@ module Enumerable # Universal quantifiers defined on this enumerable # @return [Array] attr_accessor :universals + end + class List ## - # An enumerable contains another enumerable if every statement in other is a statement in self + # A list is variable if any of its members are variable? # - # @param [RDF::Enumerable] other # @return [Boolean] - def contain?(other) - other.all? {|statement| has_statement?(statement)} - end - end - - class Statement - # Override variable? def variable? - to_a.any? {|term| !term.is_a?(RDF::Term) || term.variable?} || graph_name && graph_name.variable? + to_a.any?(&:variable?) end # Transform Statement into an SXP # @return [Array] def to_sxp_bin - [(variable? ? :pattern : :triple), subject, predicate, object, graph_name].compact + to_a.to_sxp_bin end ## @@ -39,24 +34,188 @@ def to_sxp_bin # # @return [String] def to_sxp - to_sxp_bin.to_sxp + to_a.to_sxp_bin.to_sxp end end - class Query::Solution + module Value + ## + # Returns `true` if `self` is a {RDF::N3::Algebra::Formula}. + # + # @return [Boolean] + def formula? + false + end - # Transform Statement into an SXP - # @return [Array] - def to_sxp_bin - [:solution] + bindings.map {|k, v| Query::Variable.new(k, v).to_sxp_bin} + # By default, returns itself. Can be used for terms such as blank nodes to be turned into non-disinguished variables. + # + # @param [RDF::Node] scope + # return [RDF::Query::Variable] + def to_ndvar(scope) + self + end + end + + module Term + ## + # Is this the same term? Like `#eql?`, but no variable matching + def sameTerm?(other) + eql?(other) end ## - # Returns an S-Expression (SXP) representation + # Parse the value as a numeric literal, or return 0. # - # @return [String] - def to_sxp - to_sxp_bin.to_sxp + # @return [RDF::Literal::Numeric] + def as_number + RDF::Literal(0) + end + + ## + # Parse the value as a dateTime literal, or return now. + # + # @return [RDF::Literal::DateTime] + def as_datetime + RDF::Literal::DateTime.new(DateTime.now) + end + end + + class Literal + include RDF::N3::Terminals + + ## + # Parse the value as a numeric literal, or return 0. + # + # @return [RDF::Literal::Numeric] + def as_number + return self if self.is_a?(RDF::Literal::Numeric) + case value + when DOUBLE then RDF::Literal::Double.new(value) + when DECIMAL then RDF::Literal::Decimal.new(value) + when INTEGER then RDF::Literal::Integer.new(value) + else + RDF::Literal(0) + end + end + + ## + # Parse the value as a dateTime literal, or return now. 
+ # + # @return [RDF::Literal::DateTime] + def as_datetime + return self if is_a?(RDF::Literal::DateTime) + mvalue = value + mvalue = "#{mvalue}-01" if mvalue.match?(%r(^\d{4}$)) + mvalue = "#{mvalue}-01" if mvalue.match?(%r(^\d{4}-\d{2}$)) + RDF::Literal::DateTime.new(::DateTime.iso8601(mvalue), lexical: value) + rescue + RDF::Literal(0) + end + end + + class Node + # Transform to a nondistinguished exisetntial variable in a formula scope + # + # @param [RDF::Node] scope + # return [RDF::Query::Variable] + def to_ndvar(scope) + label = "#{id}_#{scope ? scope.id : 'base'}_undext" + RDF::Query::Variable.new(label, existential: true, distinguished: false) + end + end + + class Query + class Pattern + ## + # Overrides `#initialize!` to turn blank nodes into non-distinguished variables, if the `:ndvars` option is set. + alias_method :orig_initialize!, :initialize! + def initialize! + if @options[:ndvars] + @graph_name = @graph_name.to_ndvar(nil) if @graph_name + @subject = @subject.to_ndvar(@graph_name) + @predicate = @predicate.to_ndvar(@graph_name) + @object = @object.to_ndvar(@graph_name) + end + orig_initialize! + end + + ## + # Checks pattern equality against a statement, considering nesting an lists. + # + # * A pattern which has a pattern as a subject or an object, matches + # a statement having a statement as a subject or an object using {#eql?}. + # + # @param [Statement] other + # @return [Boolean] + # + # @see RDF::URI#== + # @see RDF::Node#== + # @see RDF::Literal#== + # @see RDF::Query::Variable#== + def eql?(other) + return false unless other.is_a?(RDF::Statement) && (self.graph_name || false) == (other.graph_name || false) + + [:subject, :predicate, :object].each do |part| + case o = self.send(part) + when RDF::Query::Pattern, RDF::List + return false unless o.eql?(other.send(part)) + else + return false unless o == other.send(part) + end + end + true + end + end + + class Solution + # Transform Statement into an SXP + # @return [Array] + def to_sxp_bin + [:solution] + bindings.map do |k, v| + existential = k.to_s.end_with?('ext') + k = k.to_s.sub(/_(?:und)?ext$/, '').to_sym + distinguished = !k.to_s.end_with?('undext') + Query::Variable.new(k, v, existential: existential, distinguished: distinguished).to_sxp_bin + end + end + end + + class Variable + ## + # True if the other is the same variable + def sameTerm?(other) + other.is_a?(::RDF::Query::Variable) && name.eql?(other.name) + end + + ## + # Parse the value as a numeric literal, or return 0. + # + # @return [RDF::Literal::Numeric] + def as_number + RDF::Literal(0) + end + end + end +end + +module SPARQL + module Algebra + class Operator + ## + # Map of related formulae, indexed by graph name. + # + # @return [Hash{RDF::Resource => RDF::N3::Algebra::Formula}] + def formulae + @options.fetch(:formulae, {}) + end + + # Updates the operands for this operator. 
+ # + # @param [Array] ary + # @return [Array] + def operands=(ary) + @operands = ary + end end end end diff --git a/lib/rdf/n3/format.rb b/lib/rdf/n3/format.rb index 2d1ec6f..b67ac76 100644 --- a/lib/rdf/n3/format.rb +++ b/lib/rdf/n3/format.rb @@ -27,5 +27,70 @@ class Format < RDF::Format def self.symbols [:n3, :notation3] end + + ## + # Hash of CLI commands appropriate for this format + # @return [Hash{Symbol => Hash}] + def self.cli_commands + { + reason: { + description: "Reason over formulae.", + help: "reason [--think] file\nPerform Notation-3 reasoning.", + parse: false, + # Only shows when input and output format set + filter: {format: :n3}, + repository: RDF::N3::Repository.new, + lambda: ->(argv, **options) do + repository = options[:repository] + result_repo = RDF::N3::Repository.new + RDF::CLI.parse(argv, format: :n3, list_terms: true, **options) do |reader| + reasoner = RDF::N3::Reasoner.new(reader, **options) + reasoner.reason!(**options) + if options[:conclusions] + result_repo << reasoner.conclusions + elsif options[:data] + result_repo << reasoner.data + else + result_repo << reasoner + end + end + + # Replace input repository with results + repository.clear! + repository << result_repo + end, + options: [ + RDF::CLI::Option.new( + symbol: :conclusions, + datatype: TrueClass, + control: :checkbox, + use: :optional, + on: ["--conclusions"], + description: "Exclude formulae and statements in the original dataset."), + RDF::CLI::Option.new( + symbol: :data, + datatype: TrueClass, + control: :checkbox, + use: :optional, + on: ["--data"], + description: "Only results from default graph, excluding formulae or variables."), + RDF::CLI::Option.new( + symbol: :strings, + datatype: TrueClass, + control: :checkbox, + use: :optional, + on: ["--strings"], + description: "Returns the concatenated strings from log:outputString."), + RDF::CLI::Option.new( + symbol: :think, + datatype: TrueClass, + control: :checkbox, + use: :optional, + on: ["--think"], + description: "Continuously execute until results stop growing."), + ] + }, + } + end end end diff --git a/lib/rdf/n3/list.rb b/lib/rdf/n3/list.rb new file mode 100644 index 0000000..244d673 --- /dev/null +++ b/lib/rdf/n3/list.rb @@ -0,0 +1,630 @@ +module RDF::N3 + ## + # Sub-class of RDF::List which uses a native representation of values and allows recursive lists. + # + # Also serves as the vocabulary URI for expanding other methods + class List < RDF::List + # Allow a list to be treated as a term in a statement. + include ::RDF::Term + + URI = RDF::URI("http://www.w3.org/2000/10/swap/list#") + + # Returns a vocubulary term + def self.method_missing(property, *args, &block) + property = RDF::Vocabulary.camelize(property.to_s) + if args.empty? && !to_s.empty? + RDF::Vocabulary::Term.intern("#{URI}#{property}", attributes: {}) + else + super + end + end + + ## + # Returns the base URI for this vocabulary. + # + # @return [URI] + def self.to_uri + URI + end + + ## + # Attempts to create an RDF::N3::List from subject, or returns the node as is, if unable. + # + # @param [RDF::Resource] subject + # @return [RDF::List, RDF::Resource] returns either the original resource, or a list based on that resource + def self.try_list(subject, graph) + return subject unless subject && (subject.node? || subject.uri? && subject == RDF.nil) + ln = RDF::List.new(subject: subject, graph: graph) + return subject unless ln.valid? 
+
+      # Return a new list, outside of this queryable, with any embedded lists also expanded
+      values = ln.to_a.map {|li| try_list(li, graph)}
+      RDF::N3::List.new(subject: subject, graph: graph, values: values)
+    end
+
+    ##
+    # Initializes a newly-constructed list.
+    #
+    # Instantiates a new list based at `subject`, which **must** be an RDF::Node. List may be initialized using passed `values`.
+    #
+    # @example add constructed list to existing graph
+    #   l = RDF::N3::List.new(values: [1, 2, 3])
+    #   g = RDF::Graph.new << l
+    #   g.count # => l.count
+    #
+    # If values is not provided, but subject and graph are, then the initializer will attempt to recursively represent lists.
+    #
+    # @param [RDF::Resource] subject (RDF.nil)
+    #   Subject should be an {RDF::Node}, not a {RDF::URI}. A list with an IRI head will not validate, but is commonly used to detect if a list is valid.
+    # @param [RDF::Graph] graph (RDF::Graph.new)
+    # @param [Array] values
+    #   Any values which are not terms are coerced to `RDF::Literal`.
+    # @yield [list]
+    # @yieldparam [RDF::List] list
+    def initialize(subject: nil, graph: nil, values: nil, &block)
+      @subject = subject || (Array(values).empty? ? RDF.nil : RDF::Node.new)
+      @graph = graph
+      @valid = true
+
+      @values = case
+      when values
+        values.map do |v|
+          # Convert values, as necessary.
+          case v
+          when RDF::Value then v.to_term
+          when Symbol then RDF::Node.intern(v)
+          when Array then RDF::N3::List.new(values: v)
+          when nil then RDF.nil
+          else RDF::Literal.new(v)
+          end
+        end
+      when subject && graph
+        ln = RDF::List.new(subject: subject, graph: graph)
+        @valid = ln.valid?
+        ln.to_a.map {|li| self.class.try_list(li, graph)}
+      else
+        []
+      end
+    end
+
+    ##
+    # Lists are valid, unless established via RDF::List, in which case they are only valid if the RDF::List is valid.
+    #
+    # @return [Boolean]
+    def valid?; @valid; end
+
+    ##
+    # @see RDF::Value#==
+    def ==(other)
+      case other
+      when Array, RDF::List then to_a == other.to_a
+      else
+        false
+      end
+    end
+
+    ##
+    # The list hash is the hash of its members.
+    #
+    # @see RDF::Value#hash
+    def hash
+      to_a.hash
+    end
+
+    ##
+    # Element Assignment — Sets the element at `index`, or replaces a subarray from the `start` index for `length` elements, or replaces a subarray specified by the `range` of indices.
+    #
+    # @overload []=(index, term)
+    #   Replaces the element at `index` with `term`.
+    #   @param [Integer] index
+    #   @param [RDF::Term] term
+    #     A non-RDF::Term is coerced to a Literal.
+    #   @return [RDF::Term]
+    #   @raise [IndexError]
+    #
+    # @overload []=(start, length, value)
+    #   Replaces a subarray from the `start` index for `length` elements with `value`. Value is a {RDF::Term}, Array of {RDF::Term}, or {RDF::List}.
+    #   @param [Integer] start
+    #   @param [Integer] length
+    #   @param [RDF::Term, Array, RDF::List] value
+    #     A non-RDF::Term is coerced to a Literal.
+    #   @return [RDF::Term, RDF::List]
+    #   @raise [IndexError]
+    #
+    # @overload []=(range, value)
+    #   Replaces a subarray specified by the `range` of indices with `value`. Value is a {RDF::Term}, Array of {RDF::Term}, or {RDF::List}.
+    #   @param [Range] range
+    #   @param [RDF::Term, Array, RDF::List] value
+    #     A non-RDF::Term is coerced to a Literal.
+    #   @return [RDF::Term, RDF::List]
+    #   @raise [IndexError]
+    def []=(*args)
+      value = case args.last
+      when Array then args.last
+      when RDF::List then args.last.to_a
+      else [args.last]
+      end.map do |v|
+        # Convert values, as necessary.
+ case v + when RDF::Value then v.to_term + when Symbol then RDF::Node.intern(v) + when Array then RDF::N3::List.new(values: v) + when nil then RDF.nil + else RDF::Literal.new(v) + end + end + + ret = case args.length + when 3 + start, length = args[0], args[1] + @subject = nil if start == 0 + @values[start, length] = value + when 2 + case args.first + when Integer + raise ArgumentError, "Index form of []= takes a single term" if args.last.is_a?(Array) + @values[args.first] = value.first + when Range + @values[args.first] = value + else + raise ArgumentError, "Index form of must use an integer or range" + end + else + raise ArgumentError, "List []= takes one or two index values" + end + + # Fill any nil entries in @values with rdf:nil + @values.map! {|v| v || RDF.nil} + + @subject = RDF.nil if @values.empty? + @subject ||= RDF::Node.new + ret # Returns inserted values + end + + ## + # Appends an element to the head of this list. Existing references are not updated, as the list subject changes as a side-effect. + # + # @example + # RDF::List[].unshift(1).unshift(2).unshift(3) #=> RDF::List[3, 2, 1] + # + # @param [RDF::Term, Array, RDF::List] value + # A non-RDF::Term is coerced to a Literal + # @return [RDF::List] + # @see http://ruby-doc.org/core-2.2.2/Array.html#method-i-unshift + # + def unshift(value) + value = normalize_value(value) + @values.unshift(value) + @subject = nil + + return self + end + + ## + # Removes and returns the element at the head of this list. + # + # @example + # RDF::List[1,2,3].shift #=> 1 + # + # @return [RDF::Term] + # @see http://ruby-doc.org/core-2.2.2/Array.html#method-i-shift + def shift + return nil if empty? + @subject = nil + @values.shift + end + + ## + # Empties this list + # + # @example + # RDF::List[1, 2, 2, 3].clear #=> RDF::List[] + # + # @return [RDF::List] + # @see http://ruby-doc.org/core-2.2.2/Array.html#method-i-clear + def clear + @values.clear + @subject = nil + self + end + + ## + # Appends an element to the tail of this list. + # + # @example + # RDF::List[] << 1 << 2 << 3 #=> RDF::List[1, 2, 3] + # + # @param [RDF::Term] value + # @return [RDF::List] + # @see http://ruby-doc.org/core-2.2.2/Array.html#method-i-3C-3C + def <<(value) + value = normalize_value(value) + @subject = nil + @values << value + self + end + + ## + # Returns `true` if this list is empty. + # + # @example + # RDF::List[].empty? #=> true + # RDF::List[1, 2, 3].empty? #=> false + # + # @return [Boolean] + # @see http://ruby-doc.org/core-2.2.2/Array.html#method-i-empty-3F + def empty? + @values.empty? + end + + ## + # Returns the length of this list. + # + # @example + # RDF::List[].length #=> 0 + # RDF::List[1, 2, 3].length #=> 3 + # + # @return [Integer] + # @see http://ruby-doc.org/core-2.2.2/Array.html#method-i-length + def length + @values.length + end + + ## + # Returns the index of the first element equal to `value`, or `nil` if + # no match was found. + # + # @example + # RDF::List['a', 'b', 'c'].index('a') #=> 0 + # RDF::List['a', 'b', 'c'].index('d') #=> nil + # + # @param [RDF::Term] value + # @return [Integer] + # @see http://ruby-doc.org/core-2.2.2/Array.html#method-i-index + def index(value) + @values.index(value) + end + + ## + # Returns element at `index` with default. 
+ # + # @example + # RDF::List[1, 2, 3].fetch(0) #=> RDF::Literal(1) + # RDF::List[1, 2, 3].fetch(4) #=> IndexError + # RDF::List[1, 2, 3].fetch(4, nil) #=> nil + # RDF::List[1, 2, 3].fetch(4) { |n| n*n } #=> 16 + # + # @return [RDF::Term, nil] + # @see http://ruby-doc.org/core-1.9/classes/Array.html#M000420 + def fetch(*args, &block) + @values.fetch(*args, &block) + end + + ## + # Returns the element at `index`. + # + # @example + # RDF::List[1, 2, 3].at(0) #=> 1 + # RDF::List[1, 2, 3].at(4) #=> nil + # + # @return [RDF::Term, nil] + # @see http://ruby-doc.org/core-2.2.2/Array.html#method-i-at + def at(index) + @values.at(index) + end + + ## + # Returns the first element in this list. + # + # @example + # RDF::List[*(1..10)].first #=> RDF::Literal(1) + # + # @return [RDF::Term] + def first + @values.first + end + + ## + # Returns the last element in this list. + # + # @example + # RDF::List[*(1..10)].last #=> RDF::Literal(10) + # + # @return [RDF::Term] + # @see http://ruby-doc.org/core-2.2.2/Array.html#method-i-last + def last + @values.last + end + + ## + # Returns a list containing all but the first element of this list. + # + # @example + # RDF::List[1, 2, 3].rest #=> RDF::List[2, 3] + # + # @return [RDF::List] + def rest + self.class.new(values: @values[1..-1]) + end + + ## + # Returns a list containing the last element of this list. + # + # @example + # RDF::List[1, 2, 3].tail #=> RDF::List[3] + # + # @return [RDF::List] + def tail + self.class.new(values: @values[-1..-1]) + end + + ## + # Yields each element in this list. + # + # @example + # RDF::List[1, 2, 3].each do |value| + # puts value.inspect + # end + # + # @return [Enumerator] + # @see http://ruby-doc.org/core-1.9/classes/Enumerable.html + def each(&block) + return to_enum unless block_given? + + @values.each(&block) + end + + ## + # Yields each statement constituting this list. Uses actual statements if a graph was set, otherwise, the saved values. + # + # This will recursively get statements for sub-lists as well. + # + # @example + # RDF::List[1, 2, 3].each_statement do |statement| + # puts statement.inspect + # end + # + # @return [Enumerator] + # @see RDF::Enumerable#each_statement + def each_statement(&block) + return enum_statement unless block_given? + + if graph + RDF::List.new(subject: subject, graph: graph).each_statement(&block) + elsif @values.length > 0 + # Create a subject for each entry based on the subject bnode + subjects = (0..(@values.count-1)).map {|ndx| ndx > 0 ? RDF::Node.intern("#{subject.id}_#{ndx}") : subject} + *values, last = @values + while !values.empty? + subj = subjects.shift + value = values.shift + block.call(RDF::Statement(subj, RDF.first, value.list? ? value.subject : value)) + block.call(RDF::Statement(subj, RDF.rest, subjects.first)) + end + subj = subjects.shift + block.call(RDF::Statement(subj, RDF.first, last.list? ? last.subject : last)) + block.call(RDF::Statement(subj, RDF.rest, RDF.nil)) + end + + # If a graph was used, also get statements from sub-lists + @values.select(&:list?).each {|li| li.each_statement(&block)} + end + + ## + # Yields each subject term constituting this list along with sub-lists. + # + # @example + # RDF::List[1, 2, 3].each_subject do |subject| + # puts subject.inspect + # end + # + # @return [Enumerator] + # @see RDF::Enumerable#each + def each_subject(&block) + return enum_subject unless block_given? 
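+      # Filtering on rdf:rest yields each cell node exactly once; each_statement recurses into sub-lists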
+
+      each_statement {|st| block.call(st.subject) if st.predicate == RDF.rest}
+    end
+
+    ##
+    # Enumerate via depth-first recursive descent over list members, yielding each member
+    # @yield term
+    # @yieldparam [RDF::Term] term
+    # @return [Enumerator]
+    def each_descendant(&block)
+      if block_given?
+        each do |term|
+          term.each_descendant(&block) if term.list?
+          block.call(term)
+        end
+      end
+      enum_for(:each_descendant)
+    end
+
+    ##
+    # Does this list, or any recursive list, have any blank node members?
+    #
+    # @return [Boolean]
+    def has_nodes?
+      @values.any? {|e| e.node? || e.list? && e.has_nodes?}
+    end
+
+    ##
+    # Substitutes blank node members with existential variables, recursively.
+    #
+    # @param [RDF::Node] scope
+    # @return [RDF::N3::List]
+    def to_ndvar(scope)
+      values = @values.map do |e|
+        case e
+        when RDF::Node then e.to_ndvar(scope)
+        when RDF::N3::List then e.to_ndvar(scope)
+        else e
+        end
+      end
+      RDF::N3::List.new(values: values)
+    end
+
+    ##
+    # Returns the elements in this list as an array.
+    #
+    # @example
+    #   RDF::List[].to_a #=> []
+    #   RDF::List[1, 2, 3].to_a #=> [RDF::Literal(1), RDF::Literal(2), RDF::Literal(3)]
+    #
+    # @return [Array]
+    def to_a
+      @values
+    end
+
+    ##
+    # Checks pattern equality against another list, considering nesting.
+    #
+    # @param [List, Array] other
+    # @return [Boolean]
+    def eql?(other)
+      other = RDF::N3::List[*other] if other.is_a?(Array)
+      return false if !other.is_a?(RDF::List) || count != other.count
+      @values.each_with_index do |li, ndx|
+        case li
+        when RDF::Query::Pattern, RDF::N3::List
+          return false unless li.eql?(other.at(ndx))
+        else
+          return false unless li == other.at(ndx)
+        end
+      end
+      true
+    end
+
+    ##
+    # A list is variable if any of its members are variable?
+    #
+    # @return [Boolean]
+    def variable?
+      @values.any?(&:variable?)
+    end
+
+    ##
+    # Return the variables contained in this list
+    # @return [Array]
+    def vars
+      @values.vars
+    end
+
+    ##
+    # Returns all variables in this list.
+    #
+    # Note: this returns a hash containing distinct variables only.
+    #
+    # @return [Hash{Symbol => Variable}]
+    def variables
+      @values.inject({}) do |hash, li|
+        li.respond_to?(:variables) ? hash.merge(li.variables) : hash
+      end
+    end
+
+    ##
+    # Returns the number of variables in this list, recursively.
+    #
+    # @return [Integer]
+    def variable_count
+      variables.length
+    end
+
+    ##
+    # Returns all values of the list in the same pattern position
+    #
+    # @param [Symbol] var
+    # @param [RDF::N3::List] list
+    # @return [Array]
+    def var_values(var, list)
+      results = []
+      @values.each_index do |ndx|
+        maybe_var = @values[ndx]
+        next unless maybe_var.respond_to?(:var_values)
+        results.push(*Array(maybe_var.var_values(var, list.at(ndx))))
+      end
+      results.flatten.compact
+    end
+
+    ##
+    # Evaluates the list using the given variable `bindings`.
+    #
+    # @param [Hash{Symbol => RDF::Term}] bindings
+    #   a query solution containing zero or more variable bindings
+    # @param [Hash{Symbol => Object}] options ({})
+    #   options passed from query
+    # @return [RDF::N3::List]
+    # @see SPARQL::Algebra::Expression.evaluate
+    def evaluate(bindings, formulae: {}, **options)
+      # if values are constant, simply return ourselves
+      return self if to_a.none? {|li| li.node?
|| li.variable?} + bindings = bindings.to_h unless bindings.is_a?(Hash) + # Create a new list subject using a combination of the current subject and a hash of the binding values + subj = "#{subject.id}_#{bindings.values.sort.hash}" + values = to_a.map do |o| + o = o.evaluate(bindings, formulae: formulae, **options) || o + end + RDF::N3::List.new(subject: RDF::Node.intern(subj), values: values) + end + + ## + # Returns a query solution constructed by binding any variables in this list with the corresponding terms in the given `list`. + # + # @param [RDF::N3::List] list + # a native list with patterns to bind. + # @return [RDF::Query::Solution] + # @see RDF::Query::Pattern#solution + def solution(list) + RDF::Query::Solution.new do |solution| + @values.each_with_index do |li, ndx| + if li.respond_to?(:solution) + solution.merge!(li.solution(list[ndx])) + elsif li.is_a?(RDF::Query::Variable) + solution[li.to_sym] = list[ndx] + end + end + end + end + + ## + # Returns the base representation of this term. + # + # @return [Sring] + def to_base + "(#{@values.map(&:to_base).join(' ')})" + end + + # Transform Statement into an SXP + # @return [Array] + def to_sxp_bin + to_a.to_sxp_bin + end + + ## + # Creates a new list by recusively mapping the values of the list + # + # @return [RDF::N3::list] + def transform(&block) + values = self.to_a.map {|v| v.list? ? v.map(&block) : block.call(v)} + RDF::N3::List.new(values: values) + end + + private + + ## + # Normalizes `Array` to `RDF::List` and `nil` to `RDF.nil`. + # + # @param value [Object] + # @return [RDF::Value, Object] normalized value + def normalize_value(value) + case value + when RDF::Value then value.to_term + when Array then RDF::N3::List.new(values: value) + when Symbol then RDF::Node.intern(value) + when nil then RDF.nil + else RDF::Literal.new(value) + end + end + end +end diff --git a/lib/rdf/n3/patches/array_hacks.rb b/lib/rdf/n3/patches/array_hacks.rb deleted file mode 100644 index 17c20cd..0000000 --- a/lib/rdf/n3/patches/array_hacks.rb +++ /dev/null @@ -1,53 +0,0 @@ -class Array - # http://wiki.rubygarden.org/Ruby/page/show/ArrayPermute - # Permute an array, and call a block for each permutation - # Author: Paul Battley - def permute(prefixed=[]) - if (length < 2) - # there are no elements left to permute - yield(prefixed + self) - else - # recursively permute the remaining elements - each_with_index do |e, i| - (self[0,i]+self[(i+1)..-1]).permute(prefixed+[e]) { |a| yield a } - end - end - end unless Array.method_defined?(:permute) - - # Converts the array to a comma-separated sentence where the last element is joined by the connector word. Options: - # * :words_connector - The sign or word used to join the elements in arrays with two or more elements (default: ", ") - # * :two_words_connector - The sign or word used to join the elements in arrays with two elements (default: " and ") - # * :last_word_connector - The sign or word used to join the last element in arrays with three or more elements (default: ", and ") - def to_sentence(**options) - default_words_connector = ", " - default_two_words_connector = " and " - default_last_word_connector = ", and " - - # Try to emulate to_senteces previous to 2.3 - if options.has_key?(:connector) || options.has_key?(:skip_last_comma) - ::ActiveSupport::Deprecation.warn(":connector has been deprecated. Use :words_connector instead", caller) if options.has_key? :connector - ::ActiveSupport::Deprecation.warn(":skip_last_comma has been deprecated. 
Use :last_word_connector instead", caller) if options.has_key? :skip_last_comma - - skip_last_comma = options.delete :skip_last_comma - if connector = options.delete(:connector) - options[:last_word_connector] ||= skip_last_comma ? connector : ", #{connector}" - else - options[:last_word_connector] ||= skip_last_comma ? default_two_words_connector : default_last_word_connector - end - end - -# options.assert_valid_keys(:words_connector, :two_words_connector, :last_word_connector, :locale) - options = {words_connector: default_words_connector, two_words_connector: default_two_words_connector, last_word_connector: default_last_word_connector}.merge(options) - - case length - when 0 - "" - when 1 - self[0].to_s - when 2 - "#{self[0]}#{options[:two_words_connector]}#{self[1]}" - else - "#{self[0...-1].join(options[:words_connector])}#{options[:last_word_connector]}#{self[-1]}" - end - end unless Array.method_defined?(:to_sentence) -end diff --git a/lib/rdf/n3/reader.rb b/lib/rdf/n3/reader.rb index a3834da..8c2864a 100644 --- a/lib/rdf/n3/reader.rb +++ b/lib/rdf/n3/reader.rb @@ -1,4 +1,7 @@ # coding: utf-8 +require 'rdf/reader' +require 'ebnf' + module RDF::N3 ## # A Notation-3/Turtle parser in Ruby @@ -9,7 +12,9 @@ module RDF::N3 # # Separate pass to create branch_table from n3-selectors.n3 # - # This implementation uses distinguished variables for both universal and explicit existential variables (defined with `@forSome`). Variables created from blank nodes are non-distinguished. Distinguished existential variables are tracked using `$`, internally, as the RDF `query_pattern` logic looses details of the variable definition in solutions, where the variable is represented using a symbol. + # This implementation uses distinguished variables for both universal and explicit existential variables (defined with `@forSome`). Variables created from blank nodes are non-distinguished. Distinguished existential variables are named using an `_ext` suffix, internally, as the RDF `query_pattern` logic looses details of the variable definition in solutions, where the variable is represented using a symbol. + # + # Non-distinguished blank node variables are created as part of reasoning. # # @todo # * Formulae as RDF::Query representations @@ -18,17 +23,42 @@ module RDF::N3 # @author [Gregg Kellogg](http://greggkellogg.net/) class Reader < RDF::Reader format Format + using Refinements include RDF::Util::Logger - include Meta - include Parser + include EBNF::LL1::Parser + include Terminals - N3_KEYWORDS = %w(a is of has keywords prefix base true false forSome forAny) - - # The Blank nodes allocated for formula + # Nodes used as Formulae graph names + # # @return [Array] attr_reader :formulae + # All nodes allocated to formulae + # + # @return [Hash{RDF::Node => RDF::Graph}] + attr_reader :formula_nodes + + # Allocated variables by formula + # + # @return [Hash{Symbol => RDF::Node}] + attr_reader :variables + + ## + # N3 Reader options + # @see http://www.rubydoc.info/github/ruby-rdf/rdf/RDF/Reader#options-class_method + def self.options + super + [ + RDF::CLI::Option.new( + symbol: :list_terms, + datatype: TrueClass, + default: true, + control: :checkbox, + on: ["--list-terms CONTEXT"], + description: "Use native collections (lists), not first/rest ladder.") + ] + end + ## # Initializes the N3 reader instance. 
# @@ -40,11 +70,11 @@ class Reader < RDF::Reader # @option options [Boolean] :validate (false) # whether to validate the parsed statements and values # @option options [Boolean] :canonicalize (false) - # whether to canonicalize parsed literals - # @option options [Boolean] :intern (true) - # whether to intern all parsed URIs + # whether to canonicalize parsed literals and URIs. # @option options [Hash] :prefixes (Hash.new) # the prefix mappings to use (not supported by all readers) + # @option options [Hash] :list_terms (false) + # represent collections as an `RDF::Term`, rather than an rdf:first/rest ladder. # @return [reader] # @yield [reader] `self` # @yieldparam [RDF::Reader] reader @@ -52,44 +82,43 @@ class Reader < RDF::Reader # @raise [Error]:: Raises RDF::ReaderError if validating and an error is found def initialize(input = $stdin, **options, &block) super do - input.rewind if input.respond_to?(:rewind) - @input = input.respond_to?(:read) ? input : StringIO.new(input.to_s) - @lineno = 0 - readline # Prime the pump - - @memo = {} - @keyword_mode = false - @keywords = %w(a is of this has).map(&:freeze).freeze - @productions = [] - @prod_data = [] - - @branches = BRANCHES # Get from meta class - @regexps = REGEXPS # Get from meta class - - @formulae = [] # Nodes used as Formulae graph names - @formulae_nodes = {} - @label_uniquifier ||= "#{Random.new_seed}_000000" - @bnodes = {} # allocated bnodes by formula - @variables = {} # allocated variables by formula + @options = { + anon_base: "b0", + whitespace: WS, + depth: 0, + }.merge(@options) + @prod_stack = [] + + @formulae = [] + @formula_nodes = {} + @label_uniquifier = "0" + @bnodes = {} + @bn_labler = @options[:anon_base].dup + @bn_mapper = {} + @variables = {} if options[:base_uri] - log_info("@uri") { base_uri.inspect} - namespace(nil, uri("#{base_uri}#")) + progress("base_uri") { base_uri.inspect} + namespace(nil, iri(base_uri.to_s.match?(%r{[#/]$}) ? base_uri : "#{base_uri}#")) end # Prepopulate operator namespaces unless validating unless validate? - namespace(:crypto, RDF::N3::Crypto) - namespace(:list, RDF::N3::List) - namespace(:log, RDF::N3::Log) - namespace(:math, RDF::N3::Math) - namespace(:rei, RDF::N3::Rei) - #namespace(:string, RDF::N3::String) - namespace(:time, RDF::N3::Time) + namespace(:rdf, RDF.to_uri) + namespace(:rdfs, RDF::RDFS.to_uri) + namespace(:xsd, RDF::XSD.to_uri) + namespace(:crypto, RDF::N3::Crypto.to_uri) + namespace(:list, RDF::N3::List.to_uri) + namespace(:log, RDF::N3::Log.to_uri) + namespace(:math, RDF::N3::Math.to_uri) + namespace(:rei, RDF::N3::Rei.to_uri) + #namespace(:string, RDF::N3::String.to_uri) + namespace(:time, RDF::N3::Time.to_uri) end - log_info("validate") {validate?.inspect} - log_info("canonicalize") {canonicalize?.inspect} - log_info("intern") {intern?.inspect} + progress("validate") {validate?.inspect} + progress("canonicalize") {canonicalize?.inspect} + + @lexer = EBNF::LL1::Lexer.new(input, self.class.patterns, **@options) if block_given? case block.arity @@ -111,9 +140,16 @@ def inspect # @return [void] def each_statement(&block) if block_given? + log_recover @callback = block - parse(START.to_sym) + begin + while (@lexer.first rescue true) + read_n3Doc + end + rescue EBNF::LL1::Lexer::Error, SyntaxError, EOFError, Recovery + # Terminate loop if EOF found while recovering + end if validate? 
&& log_statistics[:error] raise RDF::ReaderError, "Errors found during processing" @@ -140,526 +176,619 @@ def each_triple end protected - # Start of production - def onStart(prod) - handler = "#{prod}Start".to_sym - log_info("#{handler}(#{respond_to?(handler, true)})", prod, depth: depth) - @productions << prod - send(handler, prod) if respond_to?(handler, true) - - end - - # End of production - def onFinish - prod = @productions.pop() - handler = "#{prod}Finish".to_sym - log_info("#{handler}(#{respond_to?(handler, true)})", depth: depth) {"#{prod}: #{@prod_data.last.inspect}"} - send(handler) if respond_to?(handler, true) - end - - # Process of a token - def onToken(prod, tok) - unless @productions.empty? - parentProd = @productions.last - handler = "#{parentProd}Token".to_sym - log_info("#{handler}(#{respond_to?(handler, true)})", depth: depth) {"#{prod}, #{tok}: #{@prod_data.last.inspect}"} - send(handler, prod, tok) if respond_to?(handler, true) - else - error("Token has no parent production") - end - end - def booleanToken(prod, tok) - lit = RDF::Literal.new(tok.delete("@"), datatype: RDF::XSD.boolean, validate: validate?, canonicalize: canonicalize?) - add_prod_data(:literal, lit) + # Terminals passed to lexer. Order matters! + + # @!parse none + terminal(:ANON, ANON) + terminal(:BLANK_NODE_LABEL, BLANK_NODE_LABEL) + terminal(:IRIREF, IRIREF, unescape: true) + terminal(:DOUBLE, DOUBLE) + terminal(:DECIMAL, DECIMAL) + terminal(:INTEGER, INTEGER) + terminal(:PNAME_LN, PNAME_LN, unescape: true) + terminal(:PNAME_NS, PNAME_NS) + terminal(:STRING_LITERAL_LONG_SINGLE_QUOTE, STRING_LITERAL_LONG_SINGLE_QUOTE, unescape: true, partial_regexp: /^'''/) + terminal(:STRING_LITERAL_LONG_QUOTE, STRING_LITERAL_LONG_QUOTE, unescape: true, partial_regexp: /^"""/) + terminal(:STRING_LITERAL_QUOTE, STRING_LITERAL_QUOTE, unescape: true) + terminal(:STRING_LITERAL_SINGLE_QUOTE, STRING_LITERAL_SINGLE_QUOTE, unescape: true) + + # String terminals + terminal(nil, %r( + [\(\){},.;\[\]a!] + | \^\^|\^ + |<-|<=|=>|= + | true|false + | has|is|of + |@forAll|@forSome + )x) + + terminal(:PREFIX, PREFIX) + terminal(:BASE, BASE) + terminal(:LANGTAG, LANGTAG) + terminal(:QUICK_VAR_NAME, QUICK_VAR_NAME, unescape: true) + + private + ## + # Read statements and directives + # + # [1] n3Doc ::= (n3Statement '.' | sparqlDirective)* + # + # @return [void] + def read_n3Doc + prod(:n3Doc, %w{.}) do + error("read_n3Doc", "Unexpected end of file") unless token = @lexer.first + case token.type + when :BASE, :PREFIX + read_directive || error("Failed to parse directive", production: :directive, token: token) + else + read_n3Statement + if !log_recovering? || @lexer.first === '.' + # If recovering, we will have eaten the closing '.' + token = @lexer.shift + unless token && token.value == '.' + error("Expected '.' 
following n3Statement", production: :n3Statement, token: token) + end + end + end + end end - def declarationStart(prod) - @prod_data << {} - end - def declarationToken(prod, tok) - case prod - when "@prefix", "@base", "@keywords" - add_prod_data(:prod, prod) - when "prefix" - add_prod_data(:prefix, tok[0..-2]) - when "explicituri" - add_prod_data(:explicituri, tok[1..-2]) - else - add_prod_data(prod.to_sym, tok) + ## + # Read statements and directives + # + # [2] n3Statement ::= n3Directive | triples | existential | universal + # + # @return [void] + def read_n3Statement + prod(:n3Statement, %w{.}) do + error("read_n3Doc", "Unexpected end of file") unless token = @lexer.first + read_uniext || + read_triples || + error("Expected token", production: :statement, token: token) end end - def declarationFinish - decl = @prod_data.pop - case decl[:prod] - when "@prefix" - uri = process_uri(decl[:explicituri]) - namespace(decl[:prefix], uri) - when "@base" - # Base, set or update document URI - uri = decl[:explicituri] - options[:base_uri] = process_uri(uri) - - # The empty prefix "" is by default , bound to "#" -- the local namespace of the file. - # The parser behaves as though there were a - # @prefix : <#>. - # just before the file. - # This means that <#foo> can be written :foo and using @keywords one can reduce that to foo. - - namespace(nil, uri.match(/[\/\#]$/) ? base_uri : process_uri("#{uri}#")) - log_debug("declarationFinish[@base]", depth: depth) {"@base=#{base_uri}"} - when "@keywords" - log_debug("declarationFinish[@keywords]", depth: depth) {@keywords.inspect} - # Keywords are handled in tokenizer and maintained in @keywords array - if (@keywords & N3_KEYWORDS) != @keywords - error("Undefined keywords used: #{(@keywords - N3_KEYWORDS).to_sentence}") if validate? + ## + # Read base and prefix directives + # + # [3] n3Directive ::= prefixID | base + # + # @return [void] + def read_directive + prod(:directive, %w{.}) do + token = @lexer.first + case token.type + when :BASE + prod(:base) do + @lexer.shift + terminated = token.value == '@base' + iri = @lexer.shift + error("Expected IRIREF", production: :base, token: iri) unless iri === :IRIREF + @options[:base_uri] = process_iri(iri.value[1..-2].gsub(/\s/, '')) + namespace(nil, base_uri.to_s.end_with?('#') ? base_uri : iri("#{base_uri}#")) + error("base", "#{token} should be downcased") if token.value.start_with?('@') && token.value != '@base' + + if terminated + error("base", "Expected #{token} to be terminated") unless @lexer.first === '.' + @lexer.shift + elsif @lexer.first === '.' + error("base", "Expected #{token} not to be terminated") + else + true + end + end + when :PREFIX + prod(:prefixID, %w{.}) do + @lexer.shift + pfx, iri = @lexer.shift, @lexer.shift + terminated = token.value == '@prefix' + error("Expected PNAME_NS", production: :prefix, token: pfx) unless pfx === :PNAME_NS + error("Expected IRIREF", production: :prefix, token: iri) unless iri === :IRIREF + debug("prefixID", depth: options[:depth]) {"Defined prefix #{pfx.inspect} mapping to #{iri.inspect}"} + namespace(pfx.value[0..-2], process_iri(iri.value[1..-2].gsub(/\s/, ''))) + error("prefixId", "#{token} should be downcased") if token.value.start_with?('@') && token.value != '@prefix' + + if terminated + error("prefixID", "Expected #{token} to be terminated") unless @lexer.first === '.' + @lexer.shift + elsif @lexer.first === '.' 
+ error("prefixID", "Expected #{token} not to be terminated") + else + true + end + end end - @userkeys = true - else - error("declarationFinish: FIXME #{decl.inspect}") end end - # Document start, instantiate - def documentStart(prod) - @formulae.push(nil) - @prod_data << {} - end - - def dtlangToken(prod, tok) - add_prod_data(:langcode, tok) if prod == "langcode" - end - - def existentialStart(prod) - @prod_data << {} - end - - # Apart from the set of statements, a formula also has a set of URIs of symbols which are universally quantified, - # and a set of URIs of symbols which are existentially quantified. - # Variables are then in general symbols which have been quantified. + ## + # Read triples # - # Here we allocate a variable (making up a name) and record with the defining formula. Quantification is done - # when the formula is completed against all in-scope variables - def existentialFinish - pd = @prod_data.pop - forSome = Array(pd[:symbol]) - forSome.each do |term| - var = univar(term, existential: true) - add_var_to_formula(@formulae.last, term, var) + # [9] triples ::= subject predicateObjectList? + # + # @return [Object] returns the last IRI matched, or subject BNode on predicateObjectList? + def read_triples + prod(:triples, %w{.}) do + error("read_triples", "Unexpected end of file") unless token = @lexer.first + subject = case token.type || token.value + when '[' + # blankNodePropertyList predicateObjectList? + read_blankNodePropertyList || error("Failed to parse blankNodePropertyList", production: :triples, token: @lexer.first) + else + # subject predicateObjectList + read_path || error("Failed to parse subject", production: :triples, token: @lexer.first) + end + read_predicateObjectList(subject) || subject end end - def expressionStart(prod) - @prod_data << {} + ## + # Read predicateObjectList + # + # [10] predicateObjectList ::= verb objectList (';' (verb objectList)?)* + # + # @param [RDF::Resource] subject + # @return [RDF::URI] the last matched verb + def read_predicateObjectList(subject) + return if @lexer.first.nil? || %w(. 
}).include?(@lexer.first.value) + prod(:predicateObjectList, %{;}) do + last_verb = nil + loop do + verb, invert = read_verb + break unless verb + last_verb = verb + prod(:_predicateObjectList_2) do + read_objectList(subject, verb, invert) || error("Expected objectList", production: :predicateObjectList, token: @lexer.first) + end + break unless @lexer.first === ';' + @lexer.shift while @lexer.first === ';' + end + last_verb + end end - # Process path items, and push on the last object for parent processing - def expressionFinish - expression = @prod_data.pop - - # If we're in teh middle of a pathtail, append - if @prod_data.last[:pathtail] && expression[:pathitem] && expression[:pathtail] - path_list = [expression[:pathitem]] + expression[:pathtail] - log_debug("expressionFinish(pathtail)", depth: depth) {"set pathtail to #{path_list.inspect}"} - @prod_data.last[:pathtail] = path_list + ## + # Read objectList + # + # [11] objectList ::= object (',' object)* + # + # @return [RDF::Term] the last matched subject + def read_objectList(subject, predicate, invert) + prod(:objectList, %{,}) do + last_object = nil + while object = prod(:_objectList_2) {read_path} + last_object = object + + if invert + add_statement(:objectList, object, predicate, subject) + else + add_statement(:objectList, subject, predicate, object) + end - dir_list = [expression[:direction]] if expression[:direction] - dir_list += expression[:directiontail] if expression[:directiontail] - @prod_data.last[:directiontail] = dir_list if dir_list - elsif expression[:pathitem] && expression[:pathtail] - add_prod_data(:expression, process_path(expression)) - elsif expression[:pathitem] - add_prod_data(:expression, expression[:pathitem]) - else - error("expressionFinish: FIXME #{expression.inspect}") + break unless @lexer.first === ',' + @lexer.shift while @lexer.first === ',' + end + last_object end end - def literalStart(prod) - @prod_data << {} - end - - def literalToken(prod, tok) - tok = tok[0, 3] == '"""' ? tok[3..-4] : tok[1..-2] - add_prod_data(:string, tok) + ## + # Read a verb + # + # [12] verb = predicate + # | 'a' + # | 'has' expression + # | 'is' expression 'of' + # | '<-' expression + # | '<=' + # | '=>' + # | '=' + # + # @return [RDF::Resource, Boolean] verb and invert? + def read_verb + invert = false + error("read_verb", "Unexpected end of file") unless token = @lexer.first + verb = case token.type || token.value + when 'a' then prod(:verb) {@lexer.shift && RDF.type} + when 'has' then prod(:verb) {@lexer.shift && read_path} + when 'is' then prod(:verb) { + @lexer.shift + invert, v = true, read_path + error( "Expected 'of'", production: :verb, token: @lexer.first) unless @lexer.first.value == 'of' + @lexer.shift + v + } + when '<-' then prod(:verb) { + @lexer.shift + invert = true + read_path + } + when '<=' then prod(:verb) { + @lexer.shift + invert = true + RDF::N3::Log.implies + } + when '=>' then prod(:verb) {@lexer.shift && RDF::N3::Log.implies} + when '=' then prod(:verb) {@lexer.shift && RDF::OWL.sameAs} + else read_path + end + [verb, invert] end - def literalFinish - lit = @prod_data.pop - content = RDF::NTriples.unescape(lit[:string]) - language = lit[:langcode] if lit[:langcode] - language = language.downcase if language && canonicalize? - datatype = lit[:symbol] - - lit = RDF::Literal.new(content, language: language, datatype: datatype, validate: validate?, canonicalize: canonicalize?) 
- add_prod_data(:literal, lit) - end + ## + # subjects, predicates and objects are all expressions, which are all paths + # + # [13] subject ::= expression + # [14] predicate ::= expression + # [16] expression ::= path + # [17] path ::= pathItem ('!' path | '^' path)? + # + # @return [RDF::Resource] + def read_path + return if @lexer.first.nil? || %w/. } ) ]/.include?(@lexer.first.value) + prod(:path) do + pathtail = path = {} + loop do + pathtail[:pathitem] = prod(:pathItem) do + read_iri || + read_blankNode || + read_quickVar || + read_collection || + read_blankNodePropertyList || + read_literal || + read_formula + end - def objectStart(prod) - @prod_data << {} - end + break if @lexer.first.nil? || !%w(! ^).include?(@lexer.first.value) + prod(:_path_2) do + pathtail[:direction] = @lexer.shift.value == '!' ? :forward : :reverse + pathtail = pathtail[:pathtail] = {} + end + end - def objectFinish - object = @prod_data.pop - if object[:expression] - add_prod_data(:object, object[:expression]) - else - error("objectFinish: FIXME #{object.inspect}") + # Returns the first object in the path + # FIXME: what if it's a verb? + process_path(path) end end - def pathitemStart(prod) - @prod_data << {} - end - - def pathitemToken(prod, tok) - case prod - when "numericliteral" - nl = RDF::NTriples.unescape(tok) - datatype = case nl - when /e/i then RDF::XSD.double - when /\./ then RDF::XSD.decimal - else RDF::XSD.integer + ## + # Read a literal + # + # [19] literal ::= rdfLiteral | numericLiteral | BOOLEAN_LITERAL + # + # @return [RDF::Literal] + def read_literal + error("Unexpected end of file", production: :literal) unless token = @lexer.first + case token.type || token.value + when :INTEGER then prod(:literal) {literal(@lexer.shift.value, datatype: RDF::XSD.integer, canonicalize: canonicalize?)} + when :DECIMAL + prod(:literal) do + value = @lexer.shift.value + value = "0#{value}" if value.start_with?(".") + literal(value, datatype: RDF::XSD.decimal, canonicalize: canonicalize?) end - - lit = RDF::Literal.new(nl, datatype: datatype, validate: validate?, canonicalize: canonicalize?) - add_prod_data(:literal, lit) - when "quickvariable" - # There is a also a shorthand syntax ?x which is the same as :x except that it implies that x is - # universally quantified not in the formula but in its parent formula - uri = process_qname(tok.sub('?', ':')) - var = uri.variable? ? uri : univar(uri) - add_var_to_formula(@formulae[-2], uri, var) - # Also add var to this formula - add_var_to_formula(@formulae.last, uri, var) - - add_prod_data(:symbol, var) - when "boolean" - lit = RDF::Literal.new(tok.delete("@"), datatype: RDF::XSD.boolean, validate: validate?, canonicalize: canonicalize?) 
- add_prod_data(:literal, lit) - when "[", "(" - # Push on state for content of blank node - @prod_data << {} - when "]", ")" - # Construct - symbol = process_anonnode(@prod_data.pop) - add_prod_data(:symbol, symbol) - when "{" - # A new formula, push on a node as a named graph - node = RDF::Node.new(".form_#{unique_label}") - @formulae << node - @formulae_nodes[node] = true - - # Promote variables defined on the earlier formula to this formula - @variables[node] = {} - @variables[@formulae[-2]].each do |name, var| - @variables[node][name] = var + when :DOUBLE then prod(:literal) {literal(@lexer.shift.value.sub(/\.([eE])/, '.0\1'), datatype: RDF::XSD.double, canonicalize: canonicalize?)} + when "true", "false" then prod(:literal) {literal(@lexer.shift.value, datatype: RDF::XSD.boolean, canonicalize: canonicalize?)} + when :STRING_LITERAL_QUOTE, :STRING_LITERAL_SINGLE_QUOTE + prod(:literal) do + value = @lexer.shift.value[1..-2] + error("read_literal", "Unexpected end of file") unless token = @lexer.first + case token.type || token.value + when :LANGTAG + literal(value, language: @lexer.shift.value[1..-1].to_sym) + when '^^' + @lexer.shift + literal(value, datatype: read_iri) + else + literal(value) + end + end + when :STRING_LITERAL_LONG_QUOTE, :STRING_LITERAL_LONG_SINGLE_QUOTE + prod(:literal) do + value = @lexer.shift.value[3..-4] + error("read_literal", "Unexpected end of file") unless token = @lexer.first + case token.type || token.value + when :LANGTAG + literal(value, language: @lexer.shift.value[1..-1].to_sym) + when '^^' + @lexer.shift + literal(value, datatype: read_iri) + else + literal(value) + end end - when "}" - # Pop off the formula - formula = @formulae.pop - add_prod_data(:symbol, formula) - else - error("pathitemToken(#{prod}, #{tok}): FIXME") end end - def pathitemFinish - pathitem = @prod_data.pop - if pathitem[:pathlist] - error("pathitemFinish(pathlist): FIXME #{pathitem.inspect}") - elsif pathitem[:propertylist] - error("pathitemFinish(propertylist): FIXME #{pathitem.inspect}") - elsif pathitem[:symbol] || pathitem[:literal] - add_prod_data(:pathitem, pathitem[:symbol] || pathitem[:literal]) - else - error("pathitemFinish: FIXME #{pathitem.inspect}") + ## + # Read a blankNodePropertyList + # + # [20] blankNodePropertyList ::= '[' predicateObjectList ']' + # + # @return [RDF::Node] + def read_blankNodePropertyList + token = @lexer.first + if token === '[' + prod(:blankNodePropertyList, %{]}) do + @lexer.shift + progress("blankNodePropertyList", depth: options[:depth], token: token) + node = bnode + debug("blankNodePropertyList: subject", depth: options[:depth]) {node.to_sxp} + read_predicateObjectList(node) + error("blankNodePropertyList", "Expected closing ']'") unless @lexer.first === ']' + @lexer.shift + node + end end end - def pathlistStart(prod) - @prod_data << {pathlist: []} - end - - def pathlistFinish - pathlist = @prod_data.pop - # Flatten propertylist into an array - expr = @prod_data.last.delete(:expression) - add_prod_data(:pathlist, expr) if expr - add_prod_data(:pathlist, pathlist[:pathlist]) if pathlist[:pathlist] - end - - def pathtailStart(prod) - @prod_data << {pathtail: []} - end - - def pathtailToken(prod, tok) - case tok - when "!", "." 
- add_prod_data(:direction, :forward) - when "^" - add_prod_data(:direction, :reverse) + ## + # Read a collection (`RDF::List`) + # + # [21] collection ::= '(' object* ')' + # + # If the `list_terms` option is given, the resulting resource is a list, otherwise, it is the list subject, and the first/rest entries are also emitted. + # @return [RDF::Node] + def read_collection + if @lexer.first === '(' + prod(:collection, %{)}) do + @lexer.shift + token = @lexer.first + progress("collection", depth: options[:depth]) {"token: #{token.inspect}"} + objects = [] + while @lexer.first.value != ')' && (object = read_path) + objects << object + end + error("collection", "Expected closing ')'") unless @lexer.first === ')' + @lexer.shift + list = RDF::N3::List.new(values: objects) + if options[:list_terms] + list + else + list.each_statement do |statement| + add_statement("collection", *statement.to_a) + end + list.subject + end + end end end - def pathtailFinish - pathtail = @prod_data.pop - add_prod_data(:pathtail, pathtail[:pathtail]) - add_prod_data(:direction, pathtail[:direction]) if pathtail[:direction] - add_prod_data(:directiontail, pathtail[:directiontail]) if pathtail[:directiontail] - end + ## + # Read a formula + # + # [22] formula ::= '{' formulaContent? '}' + # [23] formulaContent ::= n3Statement ('.' formulaContent?)? + # + # @return [RDF::Node] + def read_formula + if @lexer.first === '{' + prod(:formula, %(})) do + @lexer.shift + node = RDF::Node.intern("_form_#{unique_label}") + formulae.push(node) + formula_nodes[node] = true + debug(:formula, depth: @options[:depth]) {"id: #{node}, depth: #{formulae.length}"} + + # Promote variables defined on the earlier formula to this formula + variables[node] = {} + variables.fetch(formulae[-2], {}).each do |name, var| + variables[node][name] = var + end - def propertylistStart(prod) - @prod_data << {} - end + read_formulaContent - def propertylistFinish - propertylist = @prod_data.pop - # Flatten propertylist into an array - ary = [propertylist, propertylist.delete(:propertylist)].flatten.compact - @prod_data.last[:propertylist] = ary - end + # Pop off the formula + # Result is the BNode associated with the formula + debug(:formula, depth: @options[:depth]) {"pop: #{formulae.last}, depth: #{formulae.length}"} + error("collection", "Expected closing '}'") unless @lexer.shift === '}' - def simpleStatementStart(prod) - @prod_data << {} + formulae.pop + end + end end - # Completion of Simple Statement, all productions include :subject, and :propertyList - def simpleStatementFinish - statement = @prod_data.pop - - subject = statement[:subject] - properties = Array(statement[:propertylist]) - properties.each do |p| - predicate = p[:verb] - next unless predicate - log_debug("simpleStatementFinish(pred)", depth: depth) {predicate.to_s} - error(%(Illegal statment: "#{predicate}" missing object)) unless p.has_key?(:object) - objects = Array(p[:object]) - objects.each do |object| - if p[:invert] - add_statement("simpleStatementFinish", object, predicate, subject) + ## + # Read formula content, similaer to n3Statement + # + # [23] formulaContent ::= n3Statement ('.' formulaContent?)? + # + # @return [void] + def read_formulaContent + return if @lexer.first === '}' # Allow empty formula + prod(:formulaContent, %w(. 
})) do + loop do + token = @lexer.first + error("read_formulaContent", "Unexpected end of file") unless token + case token.type + when :BASE, :PREFIX + read_directive || error("Failed to parse directive", production: :directive, token: token) + break if @lexer.first === '}' else - add_statement("simpleStatementFinish", subject, predicate, object) + read_n3Statement + token = @lexer.first + case token.value + when '.' + @lexer.shift + # '.' optional at end of formulaContent + break if @lexer.first === '}' + when '}' + break + else + error("Expected '.' or '}' following n3Statement", production: :formulaContent, token: token) + end end end end end - def subjectStart(prod) - @prod_data << {} + ## + # Read an IRI + # + # (rule iri "26" (alt IRIREF prefixedName)) + # + # @return [RDF::URI] + def read_iri + token = @lexer.first + case token && token.type + when :IRIREF then prod(:iri) {process_iri(@lexer.shift.value[1..-2].gsub(/\s+/m, ''))} + when :PNAME_LN, :PNAME_NS then prod(:prefixedName) {process_pname(*@lexer.shift.value)} + end end - def subjectFinish - subject = @prod_data.pop - - if subject[:expression] - add_prod_data(:subject, subject[:expression]) - else - error("unknown expression type") + ## + # Read a blank node + # + # [29] blankNode ::= BLANK_NODE_LABEL | ANON + # + # @return [RDF::Node] + def read_blankNode + token = @lexer.first + case token && token.type + when :BLANK_NODE_LABEL then prod(:blankNode) {bnode(@lexer.shift.value[2..-1])} + when :ANON then @lexer.shift && prod(:blankNode) {bnode} end end - def symbolToken(prod, tok) - term = case prod - when 'explicituri' - process_uri(tok[1..-2]) - when 'qname' - process_qname(tok) - else - error("symbolToken(#{prod}, #{tok}): FIXME #{term.inspect}") + ## + # Read a quickVar, having global scope. + # + # [30] quickVar ::= QUICK_VAR_NAME + # + # @return [RDF::Query::Variable] + def read_quickVar + if @lexer.first.type == :QUICK_VAR_NAME + prod(:quickVar) do + token = @lexer.shift + value = token.value.sub('?', '') + iri = ns(nil, "#{value}_quick") + variables[nil][iri] ||= univar(iri, scope: nil) + end end - - add_prod_data(:symbol, term) end - def universalStart(prod) - @prod_data << {} + ## + # Read a list of IRIs + # + # [27] iriList ::= iri ( ',' iri )* + # + # @return [Array] the list of IRIs + def read_irilist + iris = [] + prod(:iriList, %{,}) do + while iri = read_iri + iris << iri + break unless @lexer.first === ',' + @lexer.shift while @lexer.first === ',' + end + end + iris end + ## + # Read a univeral or existential + # # Apart from the set of statements, a formula also has a set of URIs of symbols which are universally quantified, # and a set of URIs of symbols which are existentially quantified. # Variables are then in general symbols which have been quantified. # # Here we allocate a variable (making up a name) and record with the defining formula. 
Quantification is done # when the formula is completed against all in-scope variables - def universalFinish - pd = @prod_data.pop - forAll = Array(pd[:symbol]) - forAll.each do |term| - add_var_to_formula(@formulae.last, term, univar(term)) - end - end - - def verbStart(prod) - @prod_data << {} - end - - def verbToken(prod, tok) - term = case prod - when '<=' - add_prod_data(:expression, RDF::N3::Log.implies) - add_prod_data(:invert, true) - when '=>' - add_prod_data(:expression, RDF::N3::Log.implies) - when '=' - add_prod_data(:expression, RDF::OWL.sameAs) - when '@a' - add_prod_data(:expression, RDF.type) - when '@has', "@of" - # Syntactic sugar - when '@is' - add_prod_data(:invert, true) - else - error("verbToken(#{prod}, #{tok}): FIXME #{term.inspect}") - end - - add_prod_data(:symbol, term) - end - - def verbFinish - verb = @prod_data.pop - if verb[:expression] - error("Literal may not be used as a predicate") if verb[:expression].is_a?(RDF::Literal) - error("Formula may not be used as a peredicate") if @formulae_nodes.has_key?(verb[:expression]) - add_prod_data(:verb, verb[:expression]) - add_prod_data(:invert, true) if verb[:invert] - else - error("verbFinish: FIXME #{verb.inspect}") + # + # [31] existential ::= '@forSome' iriList + # [32] universal ::= '@forAll' iriList + # + # @return [void] + def read_uniext + if %w(@forSome @forAll).include?(@lexer.first.value) + token = @lexer.shift + prod(token === '@forAll' ? :universal : :existential) do + iri_list = read_irilist + iri_list.each do |iri| + # Note, this might re-create an equivalent variable already defined in this formula, and replaces an equivalent variable that may have been defined in the parent formula. + var = univar(iri, scope: formulae.last, existential: token === '@forSome') + add_var_to_formula(formulae.last, iri, var) + end + end end end - private - ################### # Utility Functions ################### - def process_anonnode(anonnode) - log_debug("process_anonnode", depth: depth) {anonnode.inspect} - - if anonnode[:propertylist] - properties = anonnode[:propertylist] - bnode = bnode() - properties.each do |p| - predicate = p[:verb] - log_debug("process_anonnode(verb)", depth: depth) {predicate.inspect} - objects = Array(p[:object]) - objects.each do |object| - if p[:invert] - add_statement("anonnode", object, predicate, bnode) - else - add_statement("anonnode", bnode, predicate, object) - end - end - end - bnode - elsif anonnode[:pathlist] - objects = Array(anonnode[:pathlist]) - list = RDF::List[*objects] - list_subjects = {} - list.each_statement do |statement| - next if statement.predicate == RDF.type && statement.object == RDF.List - add_statement("anonnode(list)", statement.subject, statement.predicate, statement.object) - end - list.subject - end - end - # Process a path, such as: - # :a.:b means [is :b of :a] Deprecated # :a!:b means [is :b of :a] => :a :b [] # :a^:b means [:b :a] => [] :b :a # # Create triple and return property used for next iteration - def process_path(expression) - log_debug("process_path", depth: depth) {expression.inspect} - - pathitem = expression[:pathitem] - pathtail = expression[:pathtail] - - direction_list = [expression[:direction], expression[:directiontail]].flatten.compact + # + # Result is last created bnode + def process_path(path) + pathitem, direction, pathtail = path[:pathitem], path[:direction], path[:pathtail] + debug("process_path", depth: @options[:depth]) {path.inspect} - pathtail.each do |pred| - direction = direction_list.shift - bnode = RDF::Node.new + while 
pathtail + bnode = bnode() + pred = pathtail.is_a?(RDF::Term) ? pathtail : pathtail[:pathitem] if direction == :reverse add_statement("process_path(reverse)", bnode, pred, pathitem) else add_statement("process_path(forward)", pathitem, pred, bnode) end pathitem = bnode + direction = pathtail[:direction] if pathtail.is_a?(Hash) + pathtail = pathtail.is_a?(Hash) && pathtail[:pathtail] end pathitem end - def process_uri(uri) - uri(base_uri, RDF::NTriples.unescape(uri)) - end - - def process_qname(tok) - if tok.include?(":") - prefix, name = tok.split(":") - elsif @userkeys - # If the @keywords directive is given, the keywords given will thereafter be recognized - # without a "@" prefix, and anything else is a local name in the default namespace. - prefix, name = "", tok - elsif %w(true false).include?(tok) - # The words true and false are boolean literals. - # - # They were added to Notation3 in 2006-02 in discussion with the SPARQL language developers, the Data - # Access Working Group. Note that no existing documents will have used a naked true or false word, without a - # @keyword statement which would make it clear that they were not to be treated as keywords. Furthermore any - # old parser encountering true or false naked or in a @keywords - return RDF::Literal.new(tok, datatype: RDF::XSD.boolean) - else - error("Set user @keywords to use barenames (#{tok}).") - end + def process_iri(iri) + iri(base_uri, iri.to_s) + end - uri = if prefix(prefix) - log_debug('process_qname(ns)', depth: depth) {"#{prefix(prefix)}, #{name}"} + def process_pname(value) + prefix, name = value.split(":", 2) + + iri = if prefix(prefix) + #debug('process_pname(ns)', depth: @options[:depth]) {"#{prefix(prefix)}, #{name}"} ns(prefix, name) - elsif prefix == '_' - log_debug('process_qname(bnode)', name, depth: depth) - # If we're in a formula, create a non-distigushed variable instead - # Note from https://www.w3.org/TeamSubmission/n3/#bnodes, it seems the blank nodes are scoped to the formula, not the file. - bnode(name) - else - log_debug('process_qname(default_ns)', name, depth: depth) - namespace(nil, uri("#{base_uri}#")) unless prefix(nil) + elsif prefix && !prefix.empty? + error("process_pname", "Use of undefined prefix #{prefix.inspect}") ns(nil, name) - end - log_debug('process_qname', depth: depth) {uri.inspect} - uri - end - - # Add values to production data, values aranged as an array - def add_prod_data(sym, value) - case @prod_data.last[sym] - when nil - @prod_data.last[sym] = value - when Array - @prod_data.last[sym] += Array(value) else - @prod_data.last[sym] = Array(@prod_data.last[sym]) + Array(value) + ns(nil, name) end + debug('process_pname', depth: @options[:depth]) {iri.inspect} + iri end # Keep track of allocated BNodes. Blank nodes are allocated to the formula. + # Unnamed bnodes are created using an incrementing labeler for repeatability. def bnode(label = nil) + form_id = formulae.last ? formulae.last.id : '_bn_ground' if label + # Return previously allocated blank node for this label. - value = "#{label}_#{unique_label}" - (@bnodes[@formulae.last] ||= {})[label.to_s] ||= RDF::Node.new(value) - else - RDF::Node.new + @bn_mapper[form_id] ||= {} + return @bn_mapper[form_id][label] if @bn_mapper[form_id][label] end + + # Get a fresh label + @bn_labler.succ! 
while @bnodes[@bn_labler] + + bn = RDF::Node.intern(@bn_labler.to_sym) + @bnodes[@bn_labler] = bn + @bn_mapper[form_id][label] = bn if label + bn end - def univar(label, existential: false) - # Label using any provided label, followed by seed, followed by incrementing index - value = "#{label}_#{unique_label}" + # If not in ground formula, note scope, and if existential + def univar(label, scope:, existential: false) + value = existential ? "#{label}_ext" : label + value = "#{value}#{scope.id}" if scope RDF::Query::Variable.new(value, existential: existential) end @@ -678,46 +807,56 @@ def add_statement(node, subject, predicate, object) else RDF::Statement(subject, predicate, object) end - log_debug("statement(#{node})", depth: depth) {statement.to_s} + debug("statement(#{node})", depth: @options[:depth]) {statement.to_s} + error("statement(#{node})", "Statement is invalid: #{statement.inspect}") if validate? && statement.invalid? @callback.call(statement) end - def namespace(prefix, uri) - uri = uri.to_s - if uri == '#' - uri = prefix(nil).to_s + '#' + def namespace(prefix, iri) + iri = iri.to_s + if iri == '#' + iri = prefix(nil).to_s + '#' end - log_debug("namespace", depth: depth) {"'#{prefix}' <#{uri}>"} - prefix(prefix, uri(uri)) + debug("namespace", depth: @options[:depth]) {"'#{prefix}' <#{iri}>"} + prefix(prefix, iri(iri)) end - # Is this an allowable keyword? - def keyword_check(kw) - unless (@keywords || %w(a is of has)).include?(kw) - raise RDF::ReaderError, "unqualified keyword '#{kw}' used without @keyword directive" if validate? - end - end - - # Create URIs - def uri(value, append = nil) + # Create IRIs + def iri(value, append = nil) value = RDF::URI(value) value = value.join(append) if append value.validate! if validate? && value.respond_to?(:validate) value.canonicalize! if canonicalize? - value = RDF::URI.intern(value) if intern? # Variable substitution for in-scope variables. Variables are in scope if they are defined in anthing other than the current formula - var = find_var(@formulae.last, value) + var = find_var(value) value = var if var value + rescue ArgumentError => e + error("iri", e.message) + end + + # Create a literal + def literal(value, **options) + debug("literal", depth: @options[:depth]) do + "value: #{value.inspect}, " + + "options: #{options.inspect}, " + + "validate: #{validate?.inspect}, " + + "c14n?: #{canonicalize?.inspect}" + end + RDF::Literal.new(value, validate: validate?, canonicalize: canonicalize?, **options) + rescue ArgumentError => e + error("Argument Error #{e.message}", production: :literal, token: @lexer.first) end - def ns(prefix, suffix) + # Decode a PName + def ns(prefix = nil, suffix = nil) + namespace(nil, iri("#{base_uri}#")) if prefix.nil? && !prefix(nil) + base = prefix(prefix).to_s suffix = suffix.to_s.sub(/^\#/, "") if base.index("#") - log_debug("ns", depth: depth) {"base: '#{base}', suffix: '#{suffix}'"} - uri(base + suffix.to_s) + iri(base + suffix.to_s) end # Returns a unique label @@ -727,11 +866,10 @@ def unique_label end # Find any variable that may be defined in the formula identified by `bn` - # @param [RDF::Node] bn name of formula - # @param [#to_s] name + # @param [RDF::Node] name of formula # @return [RDF::Query::Variable] - def find_var(sym, name) - (@variables[sym] ||= {})[name.to_s] + def find_var(name) + (variables[@formulae.last] ||= {})[name.to_s] end # Add a variable to the formula identified by `bn`, returning the variable. 
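# (Illustrative sketch, not part of this patch.) The per-formula bnode interning
# above means a repeated label such as _:who maps to a single node within one
# formula (here the ground formula). The prefix and sample data are assumptions.
#
#   require 'rdf/n3'
#
#   n3 = %(@prefix : <http://example/> . _:who :says "hi" . _:who :says "bye" .)
#   subjects = []
#   RDF::N3::Reader.new(n3) do |reader|
#     reader.each_statement {|st| subjects << st.subject}
#   end
#   puts subjects.uniq.length   # expected: 1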
Useful as an LRU for variable name lookups @@ -740,7 +878,146 @@ def find_var(sym, name) # @param [RDF::Query::Variable] var # @return [RDF::Query::Variable] def add_var_to_formula(bn, name, var) - (@variables[bn] ||= {})[name.to_s] = var + (variables[bn] ||= {})[name.to_s] = var + end + + def prod(production, recover_to = []) + @prod_stack << {prod: production, recover_to: recover_to} + @options[:depth] += 1 + recover("#{production}(start)", depth: options[:depth], token: @lexer.first) + yield + rescue EBNF::LL1::Lexer::Error, SyntaxError, Recovery => e + # Lexer encountered an illegal token or the parser encountered + # a terminal which is inappropriate for the current production. + # Perform error recovery to find a reasonable terminal based + # on the follow sets of the relevant productions. This includes + # remaining terms from the current production and the stacked + # productions + case e + when EBNF::LL1::Lexer::Error + @lexer.recover + begin + error("Lexer error", "With input '#{e.input}': #{e.message}", + production: production, + token: e.token) + rescue SyntaxError + end + end + raise EOFError, "End of input found when recovering" if @lexer.first.nil? + debug("recovery", "current token: #{@lexer.first.inspect}", depth: @options[:depth]) + + unless e.is_a?(Recovery) + # Get the list of follows for this sequence, this production and the stacked productions. + debug("recovery", "stack follows:", depth: @options[:depth]) + @prod_stack.reverse.each do |prod| + debug("recovery", level: 1, depth: @options[:depth]) {" #{prod[:prod]}: #{prod[:recover_to].inspect}"} + end + end + + # Find all follows to the top of the stack + follows = @prod_stack.map {|prod| Array(prod[:recover_to])}.flatten.compact.uniq + + # Skip tokens until one is found in follows + while (token = (@lexer.first rescue @lexer.recover)) && follows.none? {|t| token === t} + skipped = @lexer.shift + debug("recovery", depth: @options[:depth]) {"skip #{skipped.inspect}"} + end + debug("recovery", depth: @options[:depth]) {"found #{token.inspect} in follows"} + + # Re-raise the error unless token is a follows of this production + raise Recovery unless Array(recover_to).any? {|t| token === t} + + # Skip that token to get something reasonable to start the next production with + @lexer.shift + ensure + progress("#{production}(finish)", depth: options[:depth]) + @options[:depth] -= 1 + @prod_stack.pop + end + + def progress(*args, &block) + lineno = (options[:token].lineno if options[:token].respond_to?(:lineno)) || (@lexer && @lexer.lineno) + opts = args.last.is_a?(Hash) ? args.pop : {} + opts[:level] ||= 1 + opts[:lineno] ||= lineno + log_info(*args, **opts, &block) + end + + def recover(*args, &block) + lineno = (options[:token].lineno if options[:token].respond_to?(:lineno)) || (@lexer && @lexer.lineno) + opts = args.last.is_a?(Hash) ? args.pop : {} + opts[:level] ||= 1 + opts[:lineno] ||= lineno + log_recover(*args, **opts, &block) + end + + def debug(*args, &block) + lineno = (options[:token].lineno if options[:token].respond_to?(:lineno)) || (@lexer && @lexer.lineno) + opts = args.last.is_a?(Hash) ? args.pop : {} + opts[:level] ||= 0 + opts[:lineno] ||= lineno + log_debug(*args, **opts, &block) + end + + ## + # Error information, used as level `0` debug messages. 
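# (Illustrative sketch, not part of this patch.) One way these errors surface to
# callers: with validate: true a syntax error is raised (as the SyntaxError
# subclass of RDF::ReaderError defined below); without it, errors are logged and
# reading continues where it can. The invalid input below is an assumption.
#
#   require 'rdf/n3'
#
#   bad_n3 = %(@prefix : <http://example/> . :a :b "c" :d .)
#   begin
#     RDF::N3::Reader.new(bad_n3, validate: true) do |reader|
#       reader.each_statement {|st| puts st.inspect}
#     end
#   rescue RDF::ReaderError => e
#     warn e.message
#   end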
+ # + # @overload error(node, message, options) + # @param [String] node Relevant location associated with message + # @param [String] message Error string + # @param [Hash] options + # @option options [URI, #to_s] :production + # @option options [Token] :token + # @see {#debug} + def error(*args) + ctx = "" + ctx += "(found #{options[:token].inspect})" if options[:token] + ctx += ", production = #{options[:production].inspect}" if options[:production] + lineno = (options[:token].lineno if options[:token].respond_to?(:lineno)) || (@lexer && @lexer.lineno) + log_error(*args, ctx, + lineno: lineno, + token: options[:token], + production: options[:production], + depth: options[:depth], + exception: SyntaxError,) + end + + # Used for internal error recovery + class Recovery < StandardError; end + + class SyntaxError < RDF::ReaderError + ## + # The current production. + # + # @return [Symbol] + attr_reader :production + + ## + # The invalid token which triggered the error. + # + # @return [String] + attr_reader :token + + ## + # The line number where the error occurred. + # + # @return [Integer] + attr_reader :lineno + + ## + # Initializes a new syntax error instance. + # + # @param [String, #to_s] message + # @param [Hash{Symbol => Object}] options + # @option options [Symbol] :production (nil) + # @option options [String] :token (nil) + # @option options [Integer] :lineno (nil) + def initialize(message, **options) + @production = options[:production] + @token = options[:token] + @lineno = options[:lineno] || (@token.lineno if @token.respond_to?(:lineno)) + super(message.to_s) + end end end end diff --git a/lib/rdf/n3/reader/bnf-rules.n3 b/lib/rdf/n3/reader/bnf-rules.n3 deleted file mode 100644 index 1640f99..0000000 --- a/lib/rdf/n3/reader/bnf-rules.n3 +++ /dev/null @@ -1,134 +0,0 @@ -# -# Baccus - Naur Form (BNF) vocabulary -# - -@prefix rdf: . -@prefix rdfs: . -@prefix bnf: . -@prefix : . -@prefix rul: . -@prefix n3: . -@prefix list: . -@prefix doc: . -@prefix log: . -@prefix string: . -@keywords a, is, of. - - -<> rdfs:comment - -"""This set of rules process a BNF graph in its basic -cfg:mustBeOneOf BNF form and create the branching tables to drive a -predictive parser. - -See also cfg2bnf.n3 which expands the shothand ontology into the basic -BNF terms. -""". - -#_____________________________________ - - -# Enumerate options: - -{ ?x bnf:mustBeOneSequence ?y} => { ?x optionTail ?y }. - -{?x optionTail [rdf:first ?y; rdf:rest ?z]} => { - ?x bnf:branch [ bnf:sequence ?y]; - optionTail ?z. - }. - -{ ?x bnf:branch [bnf:sequence ?y] } => { ?y sequenceTail ?y }. - -sequenceTail a log:Chaff. -optionTail a log:Chaff. - -{ ?x sequenceTail [ rdf:rest ?z ] } => { ?x sequenceTail ?z }. - -# What productions can follow each other? -# This is used for working out when to - -{ ?x sequenceTail [ rdf:first ?y; rdf:rest [ rdf:first ?z ]] } => - { ?y bnf:canPrecede ?z }. - -{ ?x bnf:branch [ - bnf:sequence [ - list:last ?y]]. - ?x bnf:canPrecede ?z} => - { ?y bnf:canPrecede ?z }. - -{ ?x bnf:canPrecede ?y. - ?y bnf:branch [ bnf:sequence () ]. - ?y bnf:canPrecede ?z. -} => { - - ?x bnf:canPrecede ?z. -}. - - -bnf:eof bnf:canStartWith "@EOFDUMMY". # @@ kludge - -# Have to separate the next three rules or cwm seems to -# get screwed up and assume there is no solution @@@ - -{ ?x bnf:branch [bnf:sequence [ rdf:first ?y ]]. - } => { ?x bnf:TEST ?y }. - -{ ?x bnf:TEST ?y . - ?y log:rawType log:Literal. } => { ?x bnf:canStartWithLiteral ?y }. - -{ ?x bnf:canStartWithLiteral ?y . 
-# (?y "(.).*") string:scrape ?c # Use whole string - } => { ?y bnf:canStartWith ?y }. - -#______________________________________________________________ - - - -# Rules for determining branching - -# A branch has a sequence, which is the given BNF production, and -# one or more conditions, which are the strings on which to consider -# that branch. N3 is a langauge in whch the look-ahead often is only -# one character, and may allways be a constsnt string rather than a -# regexp (check). - -# A branchTail is a sequnece which a branch could start with -{ ?x bnf:branch ?b. - ?b bnf:sequence ?s. -} => { - ?b bnf:branchTail ?s. -}. - -{ ?b bnf:branchTail ?s. - ?s rdf:first [ bnf:branch [ bnf:sequence () ]]; - rdf:rest ?t -} => { - ?b bnf:branchTail ?t. -}. - - -{ ?x bnf:branch ?b. - ?b bnf:branchTail ?s. - ?s rdf:first [bnf:canStartWith ?y]. -} => { - ?x bnf:canStartWith ?y. - ?b bnf:condition ?y. -}. - - - -{ ?x bnf:branch ?b; - bnf:canPrecede ?z. - ?z log:rawType log:Literal. - ?b bnf:sequence (). -} => { ?b bnf:condition ?z}. - -{ ?x bnf:branch ?b; - bnf:canPrecede [bnf:canStartWith ?z]. - ?b bnf:sequence (). -} => { ?b bnf:condition ?z}. - - - - -#ends diff --git a/lib/rdf/n3/reader/meta.rb b/lib/rdf/n3/reader/meta.rb deleted file mode 100644 index 54e2d9e..0000000 --- a/lib/rdf/n3/reader/meta.rb +++ /dev/null @@ -1,641 +0,0 @@ -# coding: utf-8 -# This file is automatically generated by script/build_meta -# Branch and Regexp tables derived from http://www.w3.org/2000/10/swap/grammar/n3-selectors.n3 -module RDF::N3::Meta - BRANCHES = { - :"_:_g0" => { - "." => [], - ":" => [:"http://www.w3.org/2000/10/swap/grammar/n3#symbol", - :"_:_g5"], - "<" => [:"http://www.w3.org/2000/10/swap/grammar/n3#symbol", - :"_:_g5"], - "_" => [:"http://www.w3.org/2000/10/swap/grammar/n3#symbol", - :"_:_g5"], - "a" => [:"http://www.w3.org/2000/10/swap/grammar/n3#symbol", - :"_:_g5"], - "}" => [], - }, - :"_:_g1" => { - "." => [], - ":" => [:"http://www.w3.org/2000/10/swap/grammar/n3#symbol", - :"_:_g4"], - "<" => [:"http://www.w3.org/2000/10/swap/grammar/n3#symbol", - :"_:_g4"], - "_" => [:"http://www.w3.org/2000/10/swap/grammar/n3#symbol", - :"_:_g4"], - "a" => [:"http://www.w3.org/2000/10/swap/grammar/n3#symbol", - :"_:_g4"], - "}" => [], - }, - :"_:_g2" => { - "." => [], - "_" => [:"http://www.w3.org/2000/10/swap/grammar/n3#barename", - :"_:_g3"], - "a" => [:"http://www.w3.org/2000/10/swap/grammar/n3#barename", - :"_:_g3"], - "}" => [], - }, - :"_:_g3" => { - "," => [",", - :"http://www.w3.org/2000/10/swap/grammar/n3#barename", - :"_:_g3"], - "." => [], - "}" => [], - }, - :"_:_g4" => { - "," => [",", - :"http://www.w3.org/2000/10/swap/grammar/n3#symbol", - :"_:_g4"], - "." => [], - "}" => [], - }, - :"_:_g5" => { - "," => [",", - :"http://www.w3.org/2000/10/swap/grammar/n3#symbol", - :"_:_g5"], - "." 
=> [], - "}" => [], - }, - :"http://www.w3.org/2000/10/swap/grammar/n3#boolean" => { - "@false" => ["@false"], - "@true" => ["@true"], - }, - :"http://www.w3.org/2000/10/swap/grammar/n3#declaration" => { - "@base" => ["@base", - :"http://www.w3.org/2000/10/swap/grammar/n3#explicituri"], - "@keywords" => ["@keywords", - :"_:_g2"], - "@prefix" => ["@prefix", - :"http://www.w3.org/2000/10/swap/grammar/n3#prefix", - :"http://www.w3.org/2000/10/swap/grammar/n3#explicituri"], - }, - :"http://www.w3.org/2000/10/swap/grammar/n3#document" => { - "\"" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional", - :"http://www.w3.org/2000/10/swap/grammar/bnf#eof"], - "(" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional", - :"http://www.w3.org/2000/10/swap/grammar/bnf#eof"], - "+" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional", - :"http://www.w3.org/2000/10/swap/grammar/bnf#eof"], - "-" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional", - :"http://www.w3.org/2000/10/swap/grammar/bnf#eof"], - "0" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional", - :"http://www.w3.org/2000/10/swap/grammar/bnf#eof"], - ":" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional", - :"http://www.w3.org/2000/10/swap/grammar/bnf#eof"], - "<" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional", - :"http://www.w3.org/2000/10/swap/grammar/bnf#eof"], - "?" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional", - :"http://www.w3.org/2000/10/swap/grammar/bnf#eof"], - "@EOFDUMMY" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional", - :"http://www.w3.org/2000/10/swap/grammar/bnf#eof"], - "@base" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional", - :"http://www.w3.org/2000/10/swap/grammar/bnf#eof"], - "@false" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional", - :"http://www.w3.org/2000/10/swap/grammar/bnf#eof"], - "@forAll" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional", - :"http://www.w3.org/2000/10/swap/grammar/bnf#eof"], - "@forSome" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional", - :"http://www.w3.org/2000/10/swap/grammar/bnf#eof"], - "@keywords" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional", - :"http://www.w3.org/2000/10/swap/grammar/bnf#eof"], - "@prefix" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional", - :"http://www.w3.org/2000/10/swap/grammar/bnf#eof"], - "@true" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional", - :"http://www.w3.org/2000/10/swap/grammar/bnf#eof"], - "[" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional", - :"http://www.w3.org/2000/10/swap/grammar/bnf#eof"], - "_" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional", - :"http://www.w3.org/2000/10/swap/grammar/bnf#eof"], - "a" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional", - :"http://www.w3.org/2000/10/swap/grammar/bnf#eof"], - "{" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional", - :"http://www.w3.org/2000/10/swap/grammar/bnf#eof"], - }, - :"http://www.w3.org/2000/10/swap/grammar/n3#dtlang" => { - "!" => [], - "\"" => [], - "(" => [], - ")" => [], - "+" => [], - "," => [], - "-" => [], - "." => [], - "0" => [], - ":" => [], - ";" => [], - "<" => [], - "<=" => [], - "=" => [], - "=>" => [], - "?" 
=> [], - "@" => ["@", - :"http://www.w3.org/2000/10/swap/grammar/n3#langcode"], - "@a" => [], - "@false" => [], - "@has" => [], - "@is" => [], - "@of" => [], - "@true" => [], - "[" => [], - "]" => [], - "^" => [], - "^^" => ["^^", - :"http://www.w3.org/2000/10/swap/grammar/n3#symbol"], - "_" => [], - "a" => [], - "{" => [], - "}" => [], - }, - :"http://www.w3.org/2000/10/swap/grammar/n3#existential" => { - "@forSome" => ["@forSome", - :"_:_g1"], - }, - :"http://www.w3.org/2000/10/swap/grammar/n3#expression" => { - "\"" => [:"http://www.w3.org/2000/10/swap/grammar/n3#pathitem", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathtail"], - "(" => [:"http://www.w3.org/2000/10/swap/grammar/n3#pathitem", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathtail"], - "+" => [:"http://www.w3.org/2000/10/swap/grammar/n3#pathitem", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathtail"], - "-" => [:"http://www.w3.org/2000/10/swap/grammar/n3#pathitem", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathtail"], - "0" => [:"http://www.w3.org/2000/10/swap/grammar/n3#pathitem", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathtail"], - ":" => [:"http://www.w3.org/2000/10/swap/grammar/n3#pathitem", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathtail"], - "<" => [:"http://www.w3.org/2000/10/swap/grammar/n3#pathitem", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathtail"], - "?" => [:"http://www.w3.org/2000/10/swap/grammar/n3#pathitem", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathtail"], - "@false" => [:"http://www.w3.org/2000/10/swap/grammar/n3#pathitem", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathtail"], - "@true" => [:"http://www.w3.org/2000/10/swap/grammar/n3#pathitem", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathtail"], - "[" => [:"http://www.w3.org/2000/10/swap/grammar/n3#pathitem", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathtail"], - "_" => [:"http://www.w3.org/2000/10/swap/grammar/n3#pathitem", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathtail"], - "a" => [:"http://www.w3.org/2000/10/swap/grammar/n3#pathitem", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathtail"], - "{" => [:"http://www.w3.org/2000/10/swap/grammar/n3#pathitem", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathtail"], - }, - :"http://www.w3.org/2000/10/swap/grammar/n3#formulacontent" => { - "\"" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statementlist"], - "(" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statementlist"], - "+" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statementlist"], - "-" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statementlist"], - "0" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statementlist"], - ":" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statementlist"], - "<" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statementlist"], - "?" 
=> [:"http://www.w3.org/2000/10/swap/grammar/n3#statementlist"], - "@base" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statementlist"], - "@false" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statementlist"], - "@forAll" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statementlist"], - "@forSome" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statementlist"], - "@keywords" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statementlist"], - "@prefix" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statementlist"], - "@true" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statementlist"], - "[" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statementlist"], - "_" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statementlist"], - "a" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statementlist"], - "{" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statementlist"], - "}" => [], - }, - :"http://www.w3.org/2000/10/swap/grammar/n3#literal" => { - "\"" => [:"http://www.w3.org/2000/10/swap/grammar/n3#string", - :"http://www.w3.org/2000/10/swap/grammar/n3#dtlang"], - }, - :"http://www.w3.org/2000/10/swap/grammar/n3#object" => { - "\"" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "(" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "+" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "-" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "0" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - ":" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "<" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "?" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "@false" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "@true" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "[" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "_" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "a" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "{" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - }, - :"http://www.w3.org/2000/10/swap/grammar/n3#objecttail" => { - "," => [",", - :"http://www.w3.org/2000/10/swap/grammar/n3#object", - :"http://www.w3.org/2000/10/swap/grammar/n3#objecttail"], - "." => [], - ";" => [], - "]" => [], - "}" => [], - }, - :"http://www.w3.org/2000/10/swap/grammar/n3#pathitem" => { - "\"" => [:"http://www.w3.org/2000/10/swap/grammar/n3#literal"], - "(" => ["(", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathlist", - ")"], - "+" => [:"http://www.w3.org/2000/10/swap/grammar/n3#numericliteral"], - "-" => [:"http://www.w3.org/2000/10/swap/grammar/n3#numericliteral"], - "0" => [:"http://www.w3.org/2000/10/swap/grammar/n3#numericliteral"], - ":" => [:"http://www.w3.org/2000/10/swap/grammar/n3#symbol"], - "<" => [:"http://www.w3.org/2000/10/swap/grammar/n3#symbol"], - "?" 
=> [:"http://www.w3.org/2000/10/swap/grammar/n3#quickvariable"], - "@false" => [:"http://www.w3.org/2000/10/swap/grammar/n3#boolean"], - "@true" => [:"http://www.w3.org/2000/10/swap/grammar/n3#boolean"], - "[" => ["[", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylist", - "]"], - "_" => [:"http://www.w3.org/2000/10/swap/grammar/n3#symbol"], - "a" => [:"http://www.w3.org/2000/10/swap/grammar/n3#symbol"], - "{" => ["{", - :"http://www.w3.org/2000/10/swap/grammar/n3#formulacontent", - "}"], - }, - :"http://www.w3.org/2000/10/swap/grammar/n3#pathlist" => { - "\"" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathlist"], - "(" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathlist"], - ")" => [], - "+" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathlist"], - "-" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathlist"], - "0" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathlist"], - ":" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathlist"], - "<" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathlist"], - "?" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathlist"], - "@false" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathlist"], - "@true" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathlist"], - "[" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathlist"], - "_" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathlist"], - "a" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathlist"], - "{" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression", - :"http://www.w3.org/2000/10/swap/grammar/n3#pathlist"], - }, - :"http://www.w3.org/2000/10/swap/grammar/n3#pathtail" => { - "!" => ["!", - :"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "\"" => [], - "(" => [], - ")" => [], - "+" => [], - "," => [], - "-" => [], - "." => [], - "0" => [], - ":" => [], - ";" => [], - "<" => [], - "<=" => [], - "=" => [], - "=>" => [], - "?" 
=> [], - "@a" => [], - "@false" => [], - "@has" => [], - "@is" => [], - "@of" => [], - "@true" => [], - "[" => [], - "]" => [], - "^" => ["^", - :"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "_" => [], - "a" => [], - "{" => [], - "}" => [], - }, - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylist" => { - "\"" => [:"http://www.w3.org/2000/10/swap/grammar/n3#verb", - :"http://www.w3.org/2000/10/swap/grammar/n3#object", - :"http://www.w3.org/2000/10/swap/grammar/n3#objecttail", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail"], - "(" => [:"http://www.w3.org/2000/10/swap/grammar/n3#verb", - :"http://www.w3.org/2000/10/swap/grammar/n3#object", - :"http://www.w3.org/2000/10/swap/grammar/n3#objecttail", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail"], - "+" => [:"http://www.w3.org/2000/10/swap/grammar/n3#verb", - :"http://www.w3.org/2000/10/swap/grammar/n3#object", - :"http://www.w3.org/2000/10/swap/grammar/n3#objecttail", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail"], - "-" => [:"http://www.w3.org/2000/10/swap/grammar/n3#verb", - :"http://www.w3.org/2000/10/swap/grammar/n3#object", - :"http://www.w3.org/2000/10/swap/grammar/n3#objecttail", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail"], - "." => [], - "0" => [:"http://www.w3.org/2000/10/swap/grammar/n3#verb", - :"http://www.w3.org/2000/10/swap/grammar/n3#object", - :"http://www.w3.org/2000/10/swap/grammar/n3#objecttail", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail"], - ":" => [:"http://www.w3.org/2000/10/swap/grammar/n3#verb", - :"http://www.w3.org/2000/10/swap/grammar/n3#object", - :"http://www.w3.org/2000/10/swap/grammar/n3#objecttail", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail"], - "<" => [:"http://www.w3.org/2000/10/swap/grammar/n3#verb", - :"http://www.w3.org/2000/10/swap/grammar/n3#object", - :"http://www.w3.org/2000/10/swap/grammar/n3#objecttail", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail"], - "<=" => [:"http://www.w3.org/2000/10/swap/grammar/n3#verb", - :"http://www.w3.org/2000/10/swap/grammar/n3#object", - :"http://www.w3.org/2000/10/swap/grammar/n3#objecttail", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail"], - "=" => [:"http://www.w3.org/2000/10/swap/grammar/n3#verb", - :"http://www.w3.org/2000/10/swap/grammar/n3#object", - :"http://www.w3.org/2000/10/swap/grammar/n3#objecttail", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail"], - "=>" => [:"http://www.w3.org/2000/10/swap/grammar/n3#verb", - :"http://www.w3.org/2000/10/swap/grammar/n3#object", - :"http://www.w3.org/2000/10/swap/grammar/n3#objecttail", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail"], - "?" 
=> [:"http://www.w3.org/2000/10/swap/grammar/n3#verb", - :"http://www.w3.org/2000/10/swap/grammar/n3#object", - :"http://www.w3.org/2000/10/swap/grammar/n3#objecttail", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail"], - "@a" => [:"http://www.w3.org/2000/10/swap/grammar/n3#verb", - :"http://www.w3.org/2000/10/swap/grammar/n3#object", - :"http://www.w3.org/2000/10/swap/grammar/n3#objecttail", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail"], - "@false" => [:"http://www.w3.org/2000/10/swap/grammar/n3#verb", - :"http://www.w3.org/2000/10/swap/grammar/n3#object", - :"http://www.w3.org/2000/10/swap/grammar/n3#objecttail", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail"], - "@has" => [:"http://www.w3.org/2000/10/swap/grammar/n3#verb", - :"http://www.w3.org/2000/10/swap/grammar/n3#object", - :"http://www.w3.org/2000/10/swap/grammar/n3#objecttail", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail"], - "@is" => [:"http://www.w3.org/2000/10/swap/grammar/n3#verb", - :"http://www.w3.org/2000/10/swap/grammar/n3#object", - :"http://www.w3.org/2000/10/swap/grammar/n3#objecttail", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail"], - "@true" => [:"http://www.w3.org/2000/10/swap/grammar/n3#verb", - :"http://www.w3.org/2000/10/swap/grammar/n3#object", - :"http://www.w3.org/2000/10/swap/grammar/n3#objecttail", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail"], - "[" => [:"http://www.w3.org/2000/10/swap/grammar/n3#verb", - :"http://www.w3.org/2000/10/swap/grammar/n3#object", - :"http://www.w3.org/2000/10/swap/grammar/n3#objecttail", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail"], - "]" => [], - "_" => [:"http://www.w3.org/2000/10/swap/grammar/n3#verb", - :"http://www.w3.org/2000/10/swap/grammar/n3#object", - :"http://www.w3.org/2000/10/swap/grammar/n3#objecttail", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail"], - "a" => [:"http://www.w3.org/2000/10/swap/grammar/n3#verb", - :"http://www.w3.org/2000/10/swap/grammar/n3#object", - :"http://www.w3.org/2000/10/swap/grammar/n3#objecttail", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail"], - "{" => [:"http://www.w3.org/2000/10/swap/grammar/n3#verb", - :"http://www.w3.org/2000/10/swap/grammar/n3#object", - :"http://www.w3.org/2000/10/swap/grammar/n3#objecttail", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail"], - "}" => [], - }, - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail" => { - "." 
=> [], - ";" => [";", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylist"], - "]" => [], - "}" => [], - }, - :"http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement" => { - "\"" => [:"http://www.w3.org/2000/10/swap/grammar/n3#subject", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylist"], - "(" => [:"http://www.w3.org/2000/10/swap/grammar/n3#subject", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylist"], - "+" => [:"http://www.w3.org/2000/10/swap/grammar/n3#subject", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylist"], - "-" => [:"http://www.w3.org/2000/10/swap/grammar/n3#subject", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylist"], - "0" => [:"http://www.w3.org/2000/10/swap/grammar/n3#subject", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylist"], - ":" => [:"http://www.w3.org/2000/10/swap/grammar/n3#subject", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylist"], - "<" => [:"http://www.w3.org/2000/10/swap/grammar/n3#subject", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylist"], - "?" => [:"http://www.w3.org/2000/10/swap/grammar/n3#subject", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylist"], - "@false" => [:"http://www.w3.org/2000/10/swap/grammar/n3#subject", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylist"], - "@true" => [:"http://www.w3.org/2000/10/swap/grammar/n3#subject", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylist"], - "[" => [:"http://www.w3.org/2000/10/swap/grammar/n3#subject", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylist"], - "_" => [:"http://www.w3.org/2000/10/swap/grammar/n3#subject", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylist"], - "a" => [:"http://www.w3.org/2000/10/swap/grammar/n3#subject", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylist"], - "{" => [:"http://www.w3.org/2000/10/swap/grammar/n3#subject", - :"http://www.w3.org/2000/10/swap/grammar/n3#propertylist"], - }, - :"http://www.w3.org/2000/10/swap/grammar/n3#statement" => { - "\"" => [:"http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement"], - "(" => [:"http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement"], - "+" => [:"http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement"], - "-" => [:"http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement"], - "0" => [:"http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement"], - ":" => [:"http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement"], - "<" => [:"http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement"], - "?" 
=> [:"http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement"], - "@base" => [:"http://www.w3.org/2000/10/swap/grammar/n3#declaration"], - "@false" => [:"http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement"], - "@forAll" => [:"http://www.w3.org/2000/10/swap/grammar/n3#universal"], - "@forSome" => [:"http://www.w3.org/2000/10/swap/grammar/n3#existential"], - "@keywords" => [:"http://www.w3.org/2000/10/swap/grammar/n3#declaration"], - "@prefix" => [:"http://www.w3.org/2000/10/swap/grammar/n3#declaration"], - "@true" => [:"http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement"], - "[" => [:"http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement"], - "_" => [:"http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement"], - "a" => [:"http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement"], - "{" => [:"http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement"], - }, - :"http://www.w3.org/2000/10/swap/grammar/n3#statementlist" => { - "\"" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - :"http://www.w3.org/2000/10/swap/grammar/n3#statementtail"], - "(" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - :"http://www.w3.org/2000/10/swap/grammar/n3#statementtail"], - "+" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - :"http://www.w3.org/2000/10/swap/grammar/n3#statementtail"], - "-" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - :"http://www.w3.org/2000/10/swap/grammar/n3#statementtail"], - "0" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - :"http://www.w3.org/2000/10/swap/grammar/n3#statementtail"], - ":" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - :"http://www.w3.org/2000/10/swap/grammar/n3#statementtail"], - "<" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - :"http://www.w3.org/2000/10/swap/grammar/n3#statementtail"], - "?" 
=> [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - :"http://www.w3.org/2000/10/swap/grammar/n3#statementtail"], - "@base" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - :"http://www.w3.org/2000/10/swap/grammar/n3#statementtail"], - "@false" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - :"http://www.w3.org/2000/10/swap/grammar/n3#statementtail"], - "@forAll" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - :"http://www.w3.org/2000/10/swap/grammar/n3#statementtail"], - "@forSome" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - :"http://www.w3.org/2000/10/swap/grammar/n3#statementtail"], - "@keywords" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - :"http://www.w3.org/2000/10/swap/grammar/n3#statementtail"], - "@prefix" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - :"http://www.w3.org/2000/10/swap/grammar/n3#statementtail"], - "@true" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - :"http://www.w3.org/2000/10/swap/grammar/n3#statementtail"], - "[" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - :"http://www.w3.org/2000/10/swap/grammar/n3#statementtail"], - "_" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - :"http://www.w3.org/2000/10/swap/grammar/n3#statementtail"], - "a" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - :"http://www.w3.org/2000/10/swap/grammar/n3#statementtail"], - "{" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - :"http://www.w3.org/2000/10/swap/grammar/n3#statementtail"], - "}" => [], - }, - :"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional" => { - "\"" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - ".", - :"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional"], - "(" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - ".", - :"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional"], - "+" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - ".", - :"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional"], - "-" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - ".", - :"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional"], - "0" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - ".", - :"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional"], - ":" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - ".", - :"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional"], - "<" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - ".", - :"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional"], - "?" 
=> [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - ".", - :"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional"], - "@EOFDUMMY" => [], - "@base" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - ".", - :"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional"], - "@false" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - ".", - :"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional"], - "@forAll" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - ".", - :"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional"], - "@forSome" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - ".", - :"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional"], - "@keywords" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - ".", - :"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional"], - "@prefix" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - ".", - :"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional"], - "@true" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - ".", - :"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional"], - "[" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - ".", - :"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional"], - "_" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - ".", - :"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional"], - "a" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - ".", - :"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional"], - "{" => [:"http://www.w3.org/2000/10/swap/grammar/n3#statement", - ".", - :"http://www.w3.org/2000/10/swap/grammar/n3#statements_optional"], - }, - :"http://www.w3.org/2000/10/swap/grammar/n3#statementtail" => { - "." => [".", - :"http://www.w3.org/2000/10/swap/grammar/n3#statementlist"], - "}" => [], - }, - :"http://www.w3.org/2000/10/swap/grammar/n3#subject" => { - "\"" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "(" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "+" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "-" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "0" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - ":" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "<" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "?" 
=> [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "@false" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "@true" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "[" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "_" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "a" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "{" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - }, - :"http://www.w3.org/2000/10/swap/grammar/n3#symbol" => { - ":" => [:"http://www.w3.org/2000/10/swap/grammar/n3#qname"], - "<" => [:"http://www.w3.org/2000/10/swap/grammar/n3#explicituri"], - "_" => [:"http://www.w3.org/2000/10/swap/grammar/n3#qname"], - "a" => [:"http://www.w3.org/2000/10/swap/grammar/n3#qname"], - }, - :"http://www.w3.org/2000/10/swap/grammar/n3#universal" => { - "@forAll" => ["@forAll", - :"_:_g0"], - }, - :"http://www.w3.org/2000/10/swap/grammar/n3#verb" => { - "\"" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "(" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "+" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "-" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "0" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - ":" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "<" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "<=" => ["<="], - "=" => ["="], - "=>" => ["=>"], - "?" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "@a" => ["@a"], - "@false" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "@has" => ["@has", - :"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "@is" => ["@is", - :"http://www.w3.org/2000/10/swap/grammar/n3#expression", - "@of"], - "@true" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "[" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "_" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "a" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - "{" => [:"http://www.w3.org/2000/10/swap/grammar/n3#expression"], - }, - } - - if RUBY_VERSION >= "1.9.0" - BARENAME_START = "A-Z_a-z\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u02ff\u0370-\u037d\u037f-\u1fff\u200c-\u200d\u2070-\u218f\u2c00-\u2fef\u3001-\ud7ff\uf900-\ufdcf\ufdf0-\ufffd\u{10000}-\u{effff}" - BARENAME_TAIL = "0-9#{BARENAME_START}\u00b7\u0300-\u036f\u203f-\u2040\\-" - else - BARENAME_START = "A-Z_a-z\xc0-\xd6\xd8-\xf6\xf8-\xff" - BARENAME_TAIL = "0-9#{BARENAME_START}\xb7\\-" - end - REGEXPS = { - :"http://www.w3.org/2000/10/swap/grammar/n3#barename" => Regexp.compile(%(^[#{BARENAME_START}][#{BARENAME_TAIL}]*)), - :"http://www.w3.org/2000/10/swap/grammar/n3#explicituri" => Regexp.compile("^<[^>]*>"), - :"http://www.w3.org/2000/10/swap/grammar/n3#langcode" => Regexp.compile("^[a-zA-Z]+(-[a-zA-Z0-9]+)*"), - :"http://www.w3.org/2000/10/swap/grammar/n3#prefix" => Regexp.compile(%(^([#{BARENAME_START}][#{BARENAME_TAIL}]*)?:)), - :"http://www.w3.org/2000/10/swap/grammar/n3#qname" => Regexp.compile(%(^(([#{BARENAME_START}][#{BARENAME_TAIL}]*)?:)?([#{BARENAME_START}][#{BARENAME_TAIL}]*)?)), - :"http://www.w3.org/2000/10/swap/grammar/n3#quickvariable" => Regexp.compile(%(^\\?[#{BARENAME_START}][#{BARENAME_TAIL}]*)), - :"http://www.w3.org/2000/10/swap/grammar/n3#string" => Regexp.compile("(\"[^\"\\\\]*(?:\\\\.[^\"\\\\]*)*\")"), - - # Hack to replace integer|double|decimal with numericliteral - 
:"http://www.w3.org/2000/10/swap/grammar/n3#numericliteral" => Regexp.compile(%(^[-+]?[0-9]+(\\.[0-9]+)?([eE][-+]?[0-9]+)?)) - } -end diff --git a/lib/rdf/n3/reader/n3-selectors.n3 b/lib/rdf/n3/reader/n3-selectors.n3 deleted file mode 100644 index 4412259..0000000 Binary files a/lib/rdf/n3/reader/n3-selectors.n3 and /dev/null differ diff --git a/lib/rdf/n3/reader/n3.n3 b/lib/rdf/n3/reader/n3.n3 deleted file mode 100644 index ff013b5..0000000 --- a/lib/rdf/n3/reader/n3.n3 +++ /dev/null @@ -1,261 +0,0 @@ -# Notation3 in Notation3 -# Context Free Grammar without tokenization -# -@prefix rdf: . -@prefix rdfs: . -@prefix cfg: . -@prefix rul: . -@prefix : . -@prefix n3: . -@prefix list: . -@prefix string: . -@keywords a, is, of. - - -# Issues: -# - string token regexp not right FIXED -# - tokenizing rules in general: whitespace are not defined in n3.n3 -# and it would be nice for the *entire* syntax description to be in RDF. -# - encoding really needs specifying -# - @keywords affects tokenizing -# - comments (tokenizer deals with) -# - We assume ASCII, in fact should use not notNameChars for i18n - -# tokenizing: -# Absorb anything until end of regexp, then stil white space -# period followed IMMEDIATELY by an opener or name char is taken as "!". -# Except after a "." used instead of in those circumstances, -# ws may be inserted between tokens. -# WS MUST be inserted between tokens where ambiguity would arise. -# (possible ending characters of one and beginning characters overlap) -# - -#<> cfg:syntaxFor [ cfg:internetMediaType -# ]. - - -# __________________________________________________________________ -# -# The N3 Full Grammar - - -language a cfg:Language; - cfg:document document; - cfg:whiteSpace "@@@@@". - - -document a rul:Used; - cfg:mustBeOneSequence( - - ( -# [ cfg:zeroOrMore declaration ] -# [ cfg:zeroOrMore universal ] -# [ cfg:zeroOrMore existential ] - statements_optional - cfg:eof - ) - ). - -statements_optional cfg:mustBeOneSequence (() ( statement "." statements_optional ) ). - -# Formula does NOT need period on last statement - -formulacontent cfg:mustBeOneSequence ( - ( statementlist ) - ). - - -statementlist cfg:mustBeOneSequence ( - ( ) - ( statement statementtail ) - ). - -statementtail cfg:mustBeOneSequence ( - ( ) - ( "." statementlist ) - ). - - -statement cfg:mustBeOneSequence ( - (declaration) - (universal) - (existential) - (simpleStatement) - ). - -universal cfg:mustBeOneSequence ( - ( - "@forAll" - [ cfg:commaSeparatedListOf symbol ] - )). - -existential cfg:mustBeOneSequence( - ( "@forSome" - [ cfg:commaSeparatedListOf symbol ] - )). - - -declaration cfg:mustBeOneSequence( - ( "@base" explicituri ) - ( "@prefix" prefix explicituri ) - ( "@keywords" [ cfg:commaSeparatedListOf barename ] ) - ). - - -simpleStatement cfg:mustBeOneSequence(( subject propertylist )). - -propertylist cfg:mustBeOneSequence ( - ( ) - ( predicate object objecttail propertylisttail ) - ). - -propertylisttail cfg:mustBeOneSequence ( - ( ) - ( ";" propertylist ) - ). - - -objecttail cfg:mustBeOneSequence ( - ( ) - ( "," object objecttail ) - ). - - -predicate cfg:mustBeOneSequence ( - ( expression ) - ( "@has" expression ) - ( "@is" expression "@of" ) - ( "@a" ) - ( "=" ) - ( "=>" ) - ( "<=" ) - ). - -subject cfg:mustBeOneSequence ((expression)). - -object cfg:mustBeOneSequence ((expression)). - -expression cfg:mustBeOneSequence( - ( pathitem pathtail ) - ). - -pathtail cfg:mustBeOneSequence( - ( ) - ( "!" expression ) - ( "^" expression ) - ). 
- - -pathitem cfg:mustBeOneSequence ( - ( symbol ) - ( "{" formulacontent "}" ) - ( quickvariable ) - ( numericliteral ) - ( literal ) - ( "[" propertylist "]" ) - ( "(" pathlist ")" ) - ( boolean ) -# ( "@this" ) # Deprocated. Was allowed for this log:forAll x -). - - -boolean cfg:mustBeOneSequence ( - ( "@true" ) - ( "@false" ) -) . - -pathlist cfg:mustBeOneSequence (() (expression pathlist)). - -symbol cfg:mustBeOneSequence ( - (explicituri) - (qname) - ). - - -numericliteral cfg:mustBeOneSequence ( - ( integer ) - ( rational ) - ( double ) - ( decimal ) -) . - -rational cfg:mustBeOneSequence (( integer "/" unsignedint)). - - -literal cfg:mustBeOneSequence(( string dtlang)). - -dtlang cfg:mustBeOneSequence( () ("@" langcode) ("^^" symbol)). - - -#______________________________________________________________________ -# -# TERMINALS -# -# "canStartWith" actually gives "a" for the whole class of alpha characters -# and "0" for any of the digits 0-9. This is used to build the branching -# tables. -# -integer cfg:matches """[-+]?[0-9]+"""; - cfg:canStartWith "0", "-", "+". -unsignedint cfg:matches """[0-9]+"""; - cfg:canStartWith "0". -double cfg:matches """[-+]?[0-9]+(\\.[0-9]+)?([eE][-+]?[0-9]+)"""; - cfg:canStartWith "0", "-", "+". -decimal cfg:matches """[-+]?[0-9]+\\.[0-9]*"""; - cfg:canStartWith "0", "-", "+". - -#numericliteral cfg:matches """[-+]?[0-9]+(\\.[0-9]+)?(e[-+]?[0-9]+)?"""; -# cfg:canStartWith "0", "-", "+". - -explicituri cfg:matches "<[^>]*>"; - cfg:canStartWith "<". - -prefix cfg:matches "([A-Z_a-z\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u02ff\u0370-\u037d\u037f-\u1fff\u200c-\u200d\u2070-\u218f\u2c00-\u2fef\u3001-\ud7ff\uf900-\ufdcf\ufdf0-\ufffd\U00010000-\U000effff][\\-0-9A-Z_a-z\u00b7\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u037d\u037f-\u1fff\u200c-\u200d\u203f-\u2040\u2070-\u218f\u2c00-\u2fef\u3001-\ud7ff\uf900-\ufdcf\ufdf0-\ufffd\U00010000-\U000effff]*)?:"; - cfg:canStartWith "a", "_", ":". # @@ etc unicode - -qname cfg:matches "(([A-Z_a-z\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u02ff\u0370-\u037d\u037f-\u1fff\u200c-\u200d\u2070-\u218f\u2c00-\u2fef\u3001-\ud7ff\uf900-\ufdcf\ufdf0-\ufffd\U00010000-\U000effff][\\-0-9A-Z_a-z\u00b7\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u037d\u037f-\u1fff\u200c-\u200d\u203f-\u2040\u2070-\u218f\u2c00-\u2fef\u3001-\ud7ff\uf900-\ufdcf\ufdf0-\ufffd\U00010000-\U000effff]*)?:)?[A-Z_a-z\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u02ff\u0370-\u037d\u037f-\u1fff\u200c-\u200d\u2070-\u218f\u2c00-\u2fef\u3001-\ud7ff\uf900-\ufdcf\ufdf0-\ufffd\U00010000-\U000effff][\\-0-9A-Z_a-z\u00b7\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u037d\u037f-\u1fff\u200c-\u200d\u203f-\u2040\u2070-\u218f\u2c00-\u2fef\u3001-\ud7ff\uf900-\ufdcf\ufdf0-\ufffd\U00010000-\U000effff]*"; - cfg:canStartWith "a", "_", ":". # @@ etc unicode - -# ASCII version: -#barename cfg:matches "[a-zA-Z_][a-zA-Z0-9_]*"; # subset of qname -# cfg:canStartWith "a", "_". # @@ etc - -# This is the XML1.1 -barename cfg:matches "[A-Z_a-z\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u02ff\u0370-\u037d\u037f-\u1fff\u200c-\u200d\u2070-\u218f\u2c00-\u2fef\u3001-\ud7ff\uf900-\ufdcf\ufdf0-\ufffd\U00010000-\U000effff][\\-0-9A-Z_a-z\u00b7\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u037d\u037f-\u1fff\u200c-\u200d\u203f-\u2040\u2070-\u218f\u2c00-\u2fef\u3001-\ud7ff\uf900-\ufdcf\ufdf0-\ufffd\U00010000-\U000effff]*"; - cfg:canStartWith "a", "_". # @@ etc . 
- -# as far as I can tell, the regexp should be -# barename cfg:matches "[A-Z_a-z\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u02ff\u0370-\u037d\u037f-\u1fff\u200c-\u200d\u2070-\u218f\u2c00-\u2fef\u3001-\ud7ff\uf900-\ufdcf\ufdf0-\ufffd\U00010000-\U000effff][\\-0-9A-Z_a-z\u00b7\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u037d\u037f-\u1fff\u200c-\u200d\u203f-\u2040\u2070-\u218f\u2c00-\u2fef\u3001-\ud7ff\uf900-\ufdcf\ufdf0-\ufffd\U00010000-\U000effff]*" . -# - -quickvariable cfg:matches "\\?[A-Z_a-z\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u02ff\u0370-\u037d\u037f-\u1fff\u200c-\u200d\u2070-\u218f\u2c00-\u2fef\u3001-\ud7ff\uf900-\ufdcf\ufdf0-\ufffd\U00010000-\U000effff][\\-0-9A-Z_a-z\u00b7\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u037d\u037f-\u1fff\u200c-\u200d\u203f-\u2040\u2070-\u218f\u2c00-\u2fef\u3001-\ud7ff\uf900-\ufdcf\ufdf0-\ufffd\U00010000-\U000effff]*"; # ? barename - cfg:canStartWith "?". # - -# Maybe dtlang should just be part of string regexp? -# Whitespace is not allowed - -# was: "[a-zA-Z][a-zA-Z0-9]*(-[a-zA-Z0-9]+)?"; -langcode cfg:matches "[a-z]+(-[a-z0-9]+)*"; # https://www.w3.org/TR/rdf-testcases/#language - cfg:canStartWith "a". - - -# raw regexp single quoted would be "([^"]|(\\"))*" -# See: -# $ PYTHONPATH=$SWAP python -# >>> import tokenize -# >>> import notation3 -# >>> print notation3.stringToN3(tokenize.Double3) -# "[^\"\\\\]*(?:(?:\\\\.|\"(?!\"\"))[^\"\\\\]*)*\"\"\"" -# >>> print notation3.stringToN3(tokenize.Double) -# "[^\"\\\\]*(?:\\\\.[^\"\\\\]*)*\"" -# After that we have to prefix with one or three opening \" which -# the python regexp doesn't have. -# -# string3 cfg:matches "\"\"\"[^\"\\\\]*(?:(?:\\\\.|\"(?!\"\"))[^\"\\\\]*)*\"\"\"". -# string1 cfg:matches "\"[^\"\\\\]*(?:\\\\.[^\"\\\\]*)*\"". - -string cfg:matches "(\"\"\"[^\"\\\\]*(?:(?:\\\\.|\"(?!\"\"))[^\"\\\\]*)*\"\"\")|(\"[^\"\\\\]*(?:\\\\.[^\"\\\\]*)*\")"; - cfg:canStartWith "\"". - - -#ends diff --git a/lib/rdf/n3/reader/parser.rb b/lib/rdf/n3/reader/parser.rb deleted file mode 100644 index 903ea32..0000000 --- a/lib/rdf/n3/reader/parser.rb +++ /dev/null @@ -1,239 +0,0 @@ -# coding: utf-8 -# Simple parser to go through productions without attempting evaluation -module RDF::N3 - module Parser - START = 'http://www.w3.org/2000/10/swap/grammar/n3#document' - R_WHITESPACE = Regexp.compile('\A\s*(?:#.*$)?') - R_MLSTRING = Regexp.compile("^.*([^\"\\\\]*)\"\"\"") - SINGLE_CHARACTER_SELECTORS = %{\t\r\n !\"#$\%&'()*.,+/;<=>?[\\]^`{|}~} - NOT_QNAME_CHARS = SINGLE_CHARACTER_SELECTORS + "@" - NOT_NAME_CHARS = NOT_QNAME_CHARS + ":" - - def error(str) - log_error(str, lineno: @lineno, exception: RDF::ReaderError) - end - - def parse(prod) - todo_stack = [{prod: prod, terms: nil}] - while !todo_stack.empty? - pushed = false - if todo_stack.last[:terms].nil? - todo_stack.last[:terms] = [] - tok = self.token - log_debug("parse tok: '#{tok}'", depth: depth) {"prod #{todo_stack.last[:prod]}"} - - # Got an opened production - onStart(abbr(todo_stack.last[:prod])) - break if tok.nil? - - cur_prod = todo_stack.last[:prod] - prod_branch = @branches[cur_prod] - error("No branches found for '#{abbr(cur_prod)}'") if prod_branch.nil? - sequence = prod_branch[tok] - if sequence.nil? - dump_stack(todo_stack) if $verbose - expected = prod_branch.values.uniq.map {|u| u.map {|v| abbr(v).inspect}.join(",")} - error("Found '#{tok}' when parsing a #{abbr(cur_prod)}. 
expected #{expected.join(' | ')}") - end - #log_debug("sequence", depth: depth) {sequence.inspect} - todo_stack.last[:terms] += sequence - end - - #log_debug("parse", depth: depth) {todo_stack.last.inspect} - while !todo_stack.last[:terms].to_a.empty? - term = todo_stack.last[:terms].shift - if term.is_a?(String) - log_debug("parse term(string)", depth: depth) {term.to_s} - word = buffer[0, term.length] - if word == term - onToken(term, word) - consume(term.length) - elsif '@' + word.chop == term && @keywords.include?(word.chop) - onToken(term, word.chop) - consume(term.length - 1) - else - error("Found '#{buffer[0, 10]}...'; #{term} expected") - end - elsif regexp = @regexps[term] - if abbr(term) == 'string' && buffer[0, 3] == '"""' - # Read until end of multi-line comment if this is the start of a multi-line comment - string = '"""' - consume(3) - next_line = buffer - #log_debug("ml-str(start)", depth: depth) {next_line.dump} - until md = R_MLSTRING.match(next_line) - begin - string += next_line - next_line = readline - rescue EOFError - error("EOF reached searching for end of multi-line comment") - end - end - string += md[0].to_s - consume(md[0].to_s.length) - onToken('string', string) - #log_debug("ml-str now", depth: depth) {buffer.dump} - else - md = regexp.match(buffer) - error("Token(#{abbr(term)}) '#{buffer[0, 10]}...' should match #{regexp}") unless md - log_debug("parse", depth: depth) {"term(#{abbr(term)}:regexp): #{term}, #{regexp}.match('#{buffer[0, 10]}...') => '#{md.inspect.force_encoding(Encoding::UTF_8)}'"} - onToken(abbr(term), md.to_s) - consume(md[0].length) - end - else - log_debug("parse term(push)", depth: depth) {term} - todo_stack << {prod: term, terms: nil} - pushed = true - break - end - self.token - end - - while !pushed && todo_stack.last[:terms].to_a.empty? - todo_stack.pop - self.onFinish - end - end - while !todo_stack.empty? - todo_stack.pop - self.onFinish - end - end - - # Memoizer for get_token - def token - unless @memo.has_key?(@pos) - tok = self.get_token - @memo[@pos] = tok - log_debug("token", depth: depth) {"'#{tok}'('#{buffer[0, 10]}...')"} if buffer - end - @memo[@pos] - end - - def get_token - whitespace - - return nil if buffer.nil? - - ch2 = buffer[0, 2] - return ch2 if %w(=> <= ^^).include?(ch2) - - ch = buffer[0, 1] - @keyword_mode = false if ch == '.' 
&& @keyword_mode - - return ch if SINGLE_CHARACTER_SELECTORS.include?(ch) - return ":" if ch == ":" - return "0" if "+-0123456789".include?(ch) - - if ch == '@' - return '@' if @pos > 0 && @line[@pos-1, 1] == '"' - - j = 0 - j += 1 while buffer[j+1, 1] && !NOT_NAME_CHARS.include?(buffer[j+1, 1]) - name = buffer[1, j] - if name == 'keywords' - @keywords = [] - @keyword_mode = true - end - return '@' + name - end - - j = 0 - j += 1 while buffer[j, 1] && !NOT_QNAME_CHARS.include?(buffer[j, 1]) - word = buffer[0, j] - error("Tokenizer expected qname, found #{buffer[0, 10]}") unless word - if @keyword_mode - @keywords << word - elsif @keywords.include?(word) - if word == 'keywords' - @keywords = [] - @keyword_mode = true - end - return '@' + word.to_s # implicit keyword - end - - 'a' - end - - def whitespace - while buffer && md = R_WHITESPACE.match(buffer) - return unless md[0].length > 0 - consume(md[0].length) - #log_debug("ws", depth: depth) {"'#{md[0]}', pos=#{@pos}"} - end - end - - def readline - @line = @input.readline - @lineno += 1 - @line.force_encoding(Encoding::UTF_8) - log_debug("readline[#{@lineno}]", depth: depth) {@line.dump} - @pos = 0 - @line - rescue EOFError - @line, @pos = nil, 0 - end - - # Return data from current off set to end of line - def buffer - @line[@pos, @line.length - @pos] unless @line.nil? - end - - # Cause n characters of line to be consumed. Read new line while line is empty or until eof - def consume(n) - @memo = {} - @pos += n - readline while @line && @line.length <= @pos - #log_debug("consume[#{n}]", depth: depth) {buffer} - end - - def abbr(prodURI) - prodURI.to_s.split('#').last - end - - def depth; (@productions || []).length; end - - def onStart(prod) - $stdout.puts ' ' * @productions.length + prod - @productions << prod - end - - def onFinish - prod = @productions.pop() - $stdout.puts ' ' * @productions.length + '/' + prod - end - - def onToken(prod, tok) - $stdout.puts ' ' * @productions.length + "#{prod}(#{tok})" - end - - def dump_stack(stack) - STDERR.puts "\nstack trace:" - stack.reverse.each do |se| - STDERR.puts "#{se[:prod]}" - STDERR.puts " " + case se[:terms] - when nil then "nil" - when [] then "empty" - else se[:terms].join(",\n ") - end - end - end - - def test(input, branches, regexps) - # FIXME: for now, read in entire doc, eventually, process as stream - @input = input.respond_to?(:read) ? (input.rewind; input) : StringIO.new(input.to_s) - @lineno = 0 - readline # Prime the pump - $stdout ||= STDOUT - - @memo = {} - @keyword_mode = false - @keywords = %w(a is of this has) - @productions = [] - - @branches = branches - @regexps = regexps - parse(START.to_sym) - end - end -end \ No newline at end of file diff --git a/lib/rdf/n3/reasoner.rb b/lib/rdf/n3/reasoner.rb index 963da32..ea31d9b 100644 --- a/lib/rdf/n3/reasoner.rb +++ b/lib/rdf/n3/reasoner.rb @@ -11,13 +11,13 @@ class Reasoner include RDF::Mutable include RDF::Util::Logger - # The top-level parsed formula + # The top-level parsed formula, including builtins and variables. 
# @return [RDF::N3::Algebra::Formula] attr_reader :formula # Opens a Notation-3 file, and parses it to initialize the reasoner # - # @param [String, #to_s] filename + # @param [String, #to_s] file # @yield [reasoner] `self` # @yieldparam [RDF::N3::Reasoner] reasoner # @yieldreturn [void] ignored @@ -50,7 +50,8 @@ def self.open(file) # RDF::N3::Reader.open("rules.n3") {|r| reasoner << r} # reasoner.each_triple {} # - # @param [RDF::Enumerable] input (nil) + # @param [RDF::Mutable] input (nil) + # Input should be parsed N3 using native lists (see `:list_terms` option to {RDF::N3::Reader#initialize}) # @param [Hash{Symbol => Object}] options # @option options [#to_s] :base_uri (nil) # the base URI to use when resolving relative URIs (for acessing intermediate parser productions) @@ -59,14 +60,16 @@ def self.open(file) # @yieldreturn [void] ignored # @return [RDF::N3::Reasoner] def initialize(input, **options, &block) - @options = options + @options = options.merge(strings: {}) # for --strings and log:outputString @mutable = case input when RDF::Mutable then input - when RDF::Enumerable then RDF::Repository.new {|r| r << input} - else RDF::Repository.new + when RDF::Enumerable then RDF::N3::Repository.new {|r| r << input} + else RDF::N3::Repository.new end - log_debug("reasoner: expression", **options) {SXP::Generator.string(formula.to_sxp_bin)} + @formula = input if input.is_a?(RDF::N3::Algebra::Formula) + + log_debug("reasoner: expression") {SXP::Generator.string(formula.to_sxp_bin)} if block_given? case block.arity @@ -79,7 +82,7 @@ def initialize(input, **options, &block) ## # Returns a copy of this reasoner def dup - repo = RDF::Repository.new {|r| r << @mutable} + repo = RDF::N3::Repository.new {|r| r << @mutable} self.class.new(repo) do |reasoner| reasoner.instance_variable_set(:@options, @options.dup) reasoner.instance_variable_set(:@formula, @formula.dup) if @formula @@ -92,6 +95,7 @@ def dup # @param [RDF::Statement] statement # @return [void] def insert_statement(statement) + @formula = nil @mutable.insert_statement(statement) end @@ -100,6 +104,7 @@ def insert_statement(statement) # # @param [Hash{Symbol => Object}] options # @option options [Boolean] :apply + # @option options [Boolean] :rules # @option options [Boolean] :think # @yield [statement] # @yieldparam [RDF::Statement] statement @@ -107,29 +112,38 @@ def insert_statement(statement) def execute(**options, &block) @options[:logger] = options[:logger] if options.has_key?(:logger) + # The knowledge base is the non-variable portions of formula + knowledge_base = RDF::N3::Repository.new {|r| r << formula} + log_debug("reasoner: knowledge_base") {SXP::Generator.string(knowledge_base.statements.to_sxp_bin)} + # If thinking, continuously execute until results stop growing - if options[:think] - count = 0 - log_info("reasoner: think start") { "count: #{count}"} - while @mutable.count > count - count = @mutable.count - dataset = RDF::Graph.new << @mutable.project_graph(nil) - log_depth {formula.execute(dataset, **options)} - @mutable << formula + count = -1 + log_info("reasoner: start") { "count: #{count}"} + solutions = RDF::Query::Solutions(RDF::Query::Solution.new) + while knowledge_base.count > count + log_info("reasoner: do") { "count: #{count}"} + count = knowledge_base.count + log_depth {formula.execute(knowledge_base, solutions: solutions, **options)} + knowledge_base << formula + solutions = RDF::Query::Solutions(RDF::Query::Solution.new) if solutions.empty? 
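# --- Editor's note: illustrative usage sketch, not part of the patch. ---
# With `:think` set, the loop above re-executes the top-level formula until the
# knowledge base stops growing. A typical invocation, following the reader example
# in the reasoner docs above (the file name "rules.n3" is a placeholder):
#
#   reasoner = RDF::N3::Reasoner.new(RDF::N3::Repository.new)
#   RDF::N3::Reader.open("rules.n3", list_terms: true) {|reader| reasoner << reader}
#   reasoner.reason!(think: true) do |statement|
#     puts statement.to_s if statement.inferred?
#   end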
+ log_debug("reasoner: solutions") {SXP::Generator.string solutions.to_sxp_bin} + log_debug("reasoner: datastore") {SXP::Generator.string knowledge_base.statements.to_sxp_bin} + log_info("reasoner: inferred") {SXP::Generator.string knowledge_base.statements.select(&:inferred?).to_sxp_bin} + log_info("reasoner: formula") do + SXP::Generator.string RDF::N3::Algebra::Formula.from_enumerable(knowledge_base).to_sxp_bin + end + @formula = nil # cause formula to be re-calculated from knowledge-base + unless options[:think] + count = knowledge_base.count + break end - log_info("reasoner: think end") { "count: #{count}"} - else - # Run one iteration - log_info("reasoner: apply start") { "count: #{count}"} - dataset = RDF::Graph.new << @mutable.project_graph(nil) - log_depth {formula.execute(dataset, **options)} - @mutable << formula - log_info("reasoner: apply end") { "count: #{count}"} end + log_info("reasoner: end") { "count: #{count}"} - log_debug("reasoner: datastore") {@mutable.to_sxp} + # Add updates back to mutable, containg builtins and variables. + @mutable << knowledge_base - conclusions(&block) if block_given? + each(&block) if block_given? self end alias_method :reason!, :execute @@ -137,7 +151,7 @@ def execute(**options, &block) ## # Reason with results in a duplicate datastore # - # @see {execute} + # @see execute def reason(**options, &block) self.dup.reason!(**options, &block) end @@ -180,7 +194,7 @@ def data(&block) alias_method :each_datum, :data ## - # Returns an enumerator for {#conclusions}. + # Returns an enumerator for {#data}. # FIXME: enum_for doesn't seem to be working properly # in JRuby 1.7, so specs are marked pending # @@ -234,49 +248,24 @@ def enum_conclusions end ## - # Returns the top-level formula for this file + # Returns the concatenated strings from log:outputString + # + # @return [String] + def strings + @options[:strings]. + sort_by {|k, v| k}. + map {|(k,v)| v.join("")}. + join("") + end + + ## + # Returns the top-level formula for this file. + # + # Transforms an RDF dataset into a recursive formula structure. # # @return [RDF::N3::Algebra::Formula] def formula - # SPARQL used for SSE and algebra functionality - require 'sparql' unless defined?(:SPARQL) - - @formula ||= begin - # Create formulae from statement graph_names - formulae = (@mutable.graph_names.unshift(nil)).inject({}) do |memo, graph_name| - memo.merge(graph_name => Algebra::Formula.new(graph_name: graph_name, **@options)) - end - - # Add patterns to appropiate formula based on graph_name, - # and replace subject and object bnodes which identify - # named graphs with those formula - @mutable.each_statement do |statement| - pattern = statement.variable? ? RDF::Query::Pattern.from(statement) : statement - - # A graph name indicates a formula. 
- form = formulae[pattern.graph_name] - - # Formulae may be the subject or object of a known operator - if klass = Algebra.for(pattern.predicate) - fs = formulae.fetch(pattern.subject, pattern.subject) - fo = formulae.fetch(pattern.object, pattern.object) - form.operands << klass.new(fs, fo, parent: form, **@options) - else - # Add formulae as direct operators - if formulae.has_key?(pattern.subject) - form.operands << formulae[pattern.subject] - end - if formulae.has_key?(pattern.object) - form.operands << formulae[pattern.object] - end - pattern.graph_name = nil - form.operands << pattern - end - end - - # Formula is that without a graph name - formulae[nil] - end + @formula ||= RDF::N3::Algebra::Formula.from_enumerable(@mutable, **@options) end ## diff --git a/lib/rdf/n3/refinements.rb b/lib/rdf/n3/refinements.rb new file mode 100644 index 0000000..c8d6112 --- /dev/null +++ b/lib/rdf/n3/refinements.rb @@ -0,0 +1,178 @@ +# Refinements on core RDF class behavior for RDF::N3. +module RDF::N3::Refinements + # @!parse + # # Refinements on RDF::Term + # module RDF::Term + # ## + # # As a term is constant, this returns itself. + # # + # # @param [Hash{Symbol => RDF::Term}] bindings + # # a query solution containing zero or more variable bindings + # # @param [Hash{Symbol => Object}] options ({}) + # # options passed from query + # # @return [RDF::Term] + # # @see SPARQL::Algebra::Expression.evaluate + # def evaluate(bindings, formulae: nil, **options); end + # end + refine ::RDF::Term do + def evaluate(bindings, formulae:, **options) + self + end + end + + # @!parse + # # Refinements on RDF::Node + # module RDF::Term + # ## + # # Blank node may refer to a formula. + # # + # # @param [Hash{Symbol => RDF::Term}] bindings + # # a query solution containing zero or more variable bindings + # # @param [Hash{Symbol => Object}] options ({}) + # # options passed from query + # # @return [RDF::Node, RDF::N3::Algebra::Formula] + # # @see SPARQL::Algebra::Expression.evaluate + # def evaluate(bindings, formulae:, **options); end + # end + refine ::RDF::Node do + ## + # @return [RDF::Node, RDF::N3::Algebra::Formula] + def evaluate(bindings, formulae:, **options) + node? ? formulae.fetch(self, self) : self + end + end + + # @!parse + # # Refinements on RDF::Statement + # class ::RDF::Statement + # # Refines `valid?` to allow literal subjects and BNode predicates. + # # @return [Boolean] + # def valid?; end + # + # # Refines `invalid?` to allow literal subjects and BNode predicates. + # # @return [Boolean] + # def invalid?; end + # + # # Refines `validate!` to allow literal subjects and BNode predicates. + # # @return [RDF::Value] `self` + # # @raise [ArgumentError] if the value is invalid + # def validate!; end + # + # ## + # # As a statement is constant, this returns itself. + # # + # # @param [Hash{Symbol => RDF::Term}] bindings + # # a query solution containing zero or more variable bindings + # # @param [Hash{Symbol => Object}] options ({}) + # # options passed from query + # # @return [RDF::Statement] + # # @see SPARQL::Algebra::Expression.evaluate + # def evaluate(bindings, formulae:, **options); end + # end + refine ::RDF::Statement do + ## + # Override `valid?` terms as subjects and resources as predicates. + # + # @return [Boolean] + def valid? + has_subject? && subject.term? && subject.valid? && + has_predicate? && predicate.term? && predicate.valid? && + has_object? && object.term? && object.valid? && + (has_graph? ? (graph_name.resource? && graph_name.valid?) 
: true) + end + + ## + # @return [Boolean] + def invalid? + !valid? + end + + ## + # Default validate! implementation, overridden in concrete classes + # @return [RDF::Value] `self` + # @raise [ArgumentError] if the value is invalid + def validate! + raise ArgumentError, "#{self.inspect} is not valid" if invalid? + self + end + alias_method :validate, :validate! + + ## + # @return [RDF::Statement] + def evaluate(bindings, formulae:, **options) + self + end + end + + # @!parse + # # Refinements on RDF::Query::Pattern + # class ::RDF::Query::Pattern + # # Refines `#valid?` to allow literal subjects and BNode predicates. + # # @return [Boolean] + # def valid?; end + # + # ## + # # Evaluates the pattern using the given variable `bindings` by cloning the pattern replacing variables with their bindings recursively. If the resulting pattern is constant, it is cast as a statement. + # # + # # @param [Hash{Symbol => RDF::Term}] bindings + # # a query solution containing zero or more variable bindings + # # @param [Hash{Symbol => Object}] options ({}) + # # options passed from query + # # @return [RDF::Statement, RDF::N3::Algebra::Formula] + # # @see SPARQL::Algebra::Expression.evaluate + # def evaluate(bindings, formulae:, **options); end + # end + refine ::RDF::Query::Pattern do + ## + # Is this pattern composed only of valid components? + # + # @return [Boolean] `true` or `false` + def valid? + (has_subject? ? (subject.term? || subject.variable?) && subject.valid? : true) && + (has_predicate? ? (predicate.term? || predicate.variable?) && predicate.valid? : true) && + (has_object? ? (object.term? || object.variable?) && object.valid? : true) && + (has_graph? ? (graph_name.resource? || graph_name.variable?) && graph_name.valid? : true) + rescue NoMethodError + false + end + + # @return [RDF::Statement, RDF::N3::Algebra::Formula] + def evaluate(bindings, formulae:, **options) + elements = self.to_quad.map do |term| + term.evaluate(bindings, formulae: formulae, **options) + end.compact.map do |term| + term.node? ? formulae.fetch(term, term) : term + end + + self.class.from(elements) + end + end + + # @!parse + # # Refinements on RDF::Query::Variable + # class RDF::Query::Variable + # ## + # # If variable is bound, replace with the bound value, otherwise, returns itself + # # + # # @param [Hash{Symbol => RDF::Term}] bindings + # # a query solution containing zero or more variable bindings + # # @param [Hash{Symbol => Object}] options ({}) + # # options passed from query + # # @return [RDF::Term] + # # @see SPARQL::Algebra::Expression.evaluate + # def evaluate(bindings, formulae:, **options); end + # end + refine ::RDF::Query::Variable do + ## + # @return [RDF::Term] + def evaluate(bindings, formulae:, **options) + value = bindings.has_key?(name) ? bindings[name] : self + value.node? ? formulae.fetch(value, value) : value + end + end + + refine ::RDF::Graph do + # Allow a graph to be treated as a term in a statement. + include ::RDF::Term + end +end diff --git a/lib/rdf/n3/repository.rb b/lib/rdf/n3/repository.rb new file mode 100644 index 0000000..a50b8e6 --- /dev/null +++ b/lib/rdf/n3/repository.rb @@ -0,0 +1,332 @@ +module RDF::N3 + ## + # Sub-class of RDF::Repository which allows for native lists in different positions. + class Repository < RDF::Repository + DEFAULT_GRAPH = false + + ## + # Initializes this repository instance. 
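# (Editor's sketch: an illustrative @example, not part of the patch; URIs are placeholders.)
# @example Basic usage; list terms and embedded statements are supported natively
#   repo = RDF::N3::Repository.new
#   repo.supports?(:list_terms)   #=> true
#   repo.supports?(:rdfstar)      #=> true
#   repo << RDF::Statement.new(RDF::URI("http://example.org/s"),
#                              RDF::URI("http://example.org/p"),
#                              RDF::URI("http://example.org/o"))
#   repo.count                    #=> 1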
+ # + # @param [URI, #to_s] uri (nil) + # @param [String, #to_s] title (nil) + # @param [Hash{Symbol => Object}] options + # @option options [Boolean] :with_graph_name (true) + # Indicates that the repository supports named graphs, otherwise, + # only the default graph is supported. + # @option options [Boolean] :with_validity (true) + # Indicates that the repository supports named validation. + # @option options [Boolean] :transaction_class (DEFAULT_TX_CLASS) + # Specifies the RDF::Transaction implementation to use in this Repository. + # @yield [repository] + # @yieldparam [Repository] repository + def initialize(uri: nil, title: nil, **options, &block) + @data = options.delete(:data) || {} + super do + if block_given? + case block.arity + when 1 then block.call(self) + else instance_eval(&block) + end + end + end + end + + ## + # Returns `true` if this respository supports the given `feature`. + # + # This repository supports list_terms. + def supports?(feature) + case feature.to_sym + when :list_terms then true + when :rdfstar then true + when :snapshots then false + else super + end + end + + ## + # Creates a query from the statements in this repository, turning blank nodes into non-distinguished variables. This can be used to determine if this repository is logically a subset of another repository. + # + # @return [RDF::Query] + def to_query + RDF::Query.new do |query| + each do |statement| + query.pattern RDF::Query::Pattern.from(statement, ndvars: true) + end + end + end + + ## + # @private + # @see RDF::Countable#count + def count + count = 0 + @data.each do |_, ss| + ss.each do |_, ps| + ps.each { |_, os| count += os.size } + end + end + count + end + + ## + # @private + # @see RDF::Enumerable#has_graph? + def has_graph?(graph) + @data.has_key?(graph) + end + + ## + # @private + # @see RDF::Enumerable#each_graph + def graph_names(options = nil, &block) + @data.keys.reject { |g| g == DEFAULT_GRAPH }.to_a + end + + ## + # @private + # @see RDF::Enumerable#each_graph + def each_graph(&block) + if block_given? + @data.each_key do |gn| + yield RDF::Graph.new(graph_name: (gn == DEFAULT_GRAPH ? nil : gn), data: self) + end + end + enum_graph + end + + ## + # @private + # @see RDF::Enumerable#has_statement? + def has_statement?(statement) + has_statement_in?(@data, statement) + end + + ## + # @private + # @see RDF::Enumerable#each_statement + def each_statement(&block) + if block_given? + @data.each do |g, ss| + ss.each do |s, ps| + ps.each do |p, os| + os.each do |o, object_options| + yield RDF::Statement.new(s, p, o, object_options.merge(graph_name: g.equal?(DEFAULT_GRAPH) ? nil : g)) + end + end + end + end + end + enum_statement + end + alias_method :each, :each_statement + + ## + # Projects statements with lists expanded to first/rest chains + # + # @yield [RDF::Statement] + def each_expanded_statement(&block) + if block_given? + each_statement do |st| + if st.subject.list? + st.subject.each_statement(&block) + st.subject = st.subject.subject + end + if st.object.list? + st.object.each_statement(&block) + st.object = st.object.subject + end + block.call(st) + end + end + enum_for(:each_expanded_statement) unless block_given? + end + + ## + # Returns the expanded statements for this repository + # + # @return [Array] + def expanded_statements + each_expanded_statement.to_a + end + + ## + # @see Mutable#apply_changeset + def apply_changeset(changeset) + data = @data + changeset.deletes.each do |del| + if del.constant? 
+ data = delete_from(data, del) + else + # we need this condition to handle wildcard statements + query_pattern(del) { |stmt| data = delete_from(data, stmt) } + end + end + changeset.inserts.each { |ins| data = insert_to(data, ins) } + @data = data + end + + ## + # @see RDF::Dataset#isolation_level + def isolation_level + :serializable + end + + protected + + ## + # Match elements with `eql?`, not `==` + # + # `graph_name` of `false` matches default graph. Unbound variable matches + # non-false graph name. + # + # Matches terms which are native lists. + # + # @private + # @see RDF::Queryable#query_pattern + def query_pattern(pattern, **options, &block) + if block_given? + graph_name = pattern.graph_name + subject = pattern.subject + predicate = pattern.predicate + object = pattern.object + + cs = @data.has_key?(graph_name) ? { graph_name => @data[graph_name] } : @data + + cs.each do |c, ss| + next unless graph_name.nil? || + graph_name == DEFAULT_GRAPH && !c || + graph_name.eql?(c) + + ss = if subject.nil? || subject.is_a?(RDF::Query::Variable) + ss + elsif subject.is_a?(RDF::N3::List) + # Match subjects which are eql lists + ss.keys.select {|s| s.list? && subject.eql?(s)}.inject({}) do |memo, li| + memo.merge(li => ss[li]) + end + elsif subject.is_a?(RDF::Query::Pattern) + # Match subjects which are statements matching this pattern + ss.keys.select {|s| s.statement? && subject.eql?(s)}.inject({}) do |memo, st| + memo.merge(st => ss[st]) + end + elsif ss.has_key?(subject) + { subject => ss[subject] } + else + [] + end + ss.each do |s, ps| + ps = if predicate.nil? || predicate.is_a?(RDF::Query::Variable) + ps + elsif predicate.is_a?(RDF::N3::List) + # Match predicates which are eql lists + ps.keys.select {|p| p.list? && predicate.eql?(p)}.inject({}) do |memo, li| + memo.merge(li => ps[li]) + end + elsif ps.has_key?(predicate) + { predicate => ps[predicate] } + else + [] + end + ps.each do |p, os| + os.each do |o, object_options| + next unless object.nil? || object.eql?(o) + yield RDF::Statement.new(s, p, o, object_options.merge(graph_name: c.equal?(DEFAULT_GRAPH) ? nil : c)) + end + end + end + end + else + enum_for(:query_pattern, pattern, **options) + end + end + + ## + # @private + # @see RDF::Mutable#insert + def insert_statement(statement) + @data = insert_to(@data, statement) + end + + ## + # @private + # @see RDF::Mutable#delete + def delete_statement(statement) + @data = delete_from(@data, statement) + end + + ## + # @private + # @see RDF::Mutable#clear + def clear_statements + @data = @data.clear + end + + ## + # @private + # @return [Hash] + def data + @data + end + + ## + # @private + # @return [Hash] + def data=(hash) + @data = hash + end + + private + + ## + # @private + # @see #has_statement + def has_statement_in?(data, statement) + s, p, o, g = statement.to_quad + g ||= DEFAULT_GRAPH + + data.has_key?(g) && + data[g].has_key?(s) && + data[g][s].has_key?(p) && + data[g][s][p].has_key?(o) + end + + ## + # @private + # @return [Hash] a new, updated hash + def insert_to(data, statement) + raise ArgumentError, "Statement #{statement.inspect} is incomplete" if statement.incomplete? + + s, p, o, c = statement.to_quad + c ||= DEFAULT_GRAPH + unless has_statement_in?(data, statement) + data = data.has_key?(c) ? data.dup : data.merge(c => {}) + data[c] = data[c].has_key?(s) ? data[c].dup : data[c].merge(s => {}) + data[c][s] = data[c][s].has_key?(p) ? 
data[c][s].dup : data[c][s].merge(p => {}) + data[c][s][p] = data[c][s][p].merge(o => statement.options) + end + + # If statement is inferred, make sure that it is marked as inferred in the dataset. + data[c][s][p][o][:inferred] = true if statement.options[:inferred] + + data + end + + ## + # @private + # @return [Hash] a new, updated hash + def delete_from(data, statement) + if has_statement_in?(data, statement) + s, p, o, g = statement.to_quad + g = DEFAULT_GRAPH unless supports?(:graph_name) + g ||= DEFAULT_GRAPH + + os = data[g][s][p].dup.delete_if {|k,v| k == o} + ps = os.empty? ? data[g][s].dup.delete_if {|k,v| k == p} : data[g][s].merge(p => os) + ss = ps.empty? ? data[g].dup.delete_if {|k,v| k == s} : data[g].merge(s => ps) + return ss.empty? ? data.dup.delete_if {|k,v| k == g} : data.merge(g => ss) + end + data + end + end +end diff --git a/lib/rdf/n3/terminals.rb b/lib/rdf/n3/terminals.rb new file mode 100644 index 0000000..4dae60a --- /dev/null +++ b/lib/rdf/n3/terminals.rb @@ -0,0 +1,80 @@ +# encoding: utf-8 +module RDF::N3 + module Terminals + # Definitions of token regular expressions used for lexical analysis + ## + # Unicode regular expressions for Ruby 1.9+ with the Oniguruma engine. + U_CHARS1 = Regexp.compile(<<-EOS.gsub(/\s+/, '')) + [\\u00C0-\\u00D6]|[\\u00D8-\\u00F6]|[\\u00F8-\\u02FF]| + [\\u0370-\\u037D]|[\\u037F-\\u1FFF]|[\\u200C-\\u200D]| + [\\u2070-\\u218F]|[\\u2C00-\\u2FEF]|[\\u3001-\\uD7FF]| + [\\uF900-\\uFDCF]|[\\uFDF0-\\uFFFD]|[\\u{10000}-\\u{EFFFF}] + EOS + U_CHARS2 = Regexp.compile("\\u00B7|[\\u0300-\\u036F]|[\\u203F-\\u2040]", Regexp::FIXEDENCODING).freeze + IRI_RANGE = Regexp.compile("[[^<>\"{}|^`\\\\]&&[^\\x00-\\x20]]", Regexp::FIXEDENCODING).freeze + + ESCAPE_CHAR4 = /\\u(?:[0-9A-Fa-f]{4,4})/u.freeze # \uXXXX + ESCAPE_CHAR8 = /\\U(?:[0-9A-Fa-f]{8,8})/u.freeze # \UXXXXXXXX + UCHAR = /#{ESCAPE_CHAR4}|#{ESCAPE_CHAR8}/n.freeze + # 170s + PERCENT = /%[0-9A-Fa-f]{2}/u.freeze + # 172s + PN_LOCAL_ESC = /\\[_~\.\-\!$\&'\(\)\*\+,;=\/\?\#@%]/u.freeze + # 169s + PLX = /#{PERCENT}|#{PN_LOCAL_ESC}/u.freeze + # 163s + PN_CHARS_BASE = /[A-Z]|[a-z]|#{U_CHARS1}/u.freeze + # 164s + PN_CHARS_U = /_|#{PN_CHARS_BASE}/u.freeze + # 166s + PN_CHARS = /-|[0-9]|#{PN_CHARS_U}|#{U_CHARS2}/u.freeze + PN_LOCAL_BODY = /(?:(?:\.|:|#{PN_CHARS}|#{PLX})*(?:#{PN_CHARS}|:|#{PLX}))?/u.freeze + PN_CHARS_BODY = /(?:(?:\.|#{PN_CHARS})*#{PN_CHARS})?/u.freeze + # 167s + PN_PREFIX = /#{PN_CHARS_BASE}#{PN_CHARS_BODY}/u.freeze + # 168s + PN_LOCAL = /(?:[0-9]|:|#{PN_CHARS_U}|#{PLX})#{PN_LOCAL_BODY}/u.freeze + # 154s + EXPONENT = /[eE][+-]?[0-9]+/u.freeze + # 159s + ECHAR = /\\[tbnrf\\"']/u.freeze + # 18 + IRIREF = /<(?:#{IRI_RANGE}|#{UCHAR})*>/mu.freeze + # 139s + PNAME_NS = /#{PN_PREFIX}?:/u.freeze + # 140s + PNAME_LN = /#{PNAME_NS}#{PN_LOCAL}/u.freeze + # 141s + BLANK_NODE_LABEL = /_:(?:[0-9]|#{PN_CHARS_U})(?:(?:#{PN_CHARS}|\.)*#{PN_CHARS})?/u.freeze + # 144s + # XXX: negative-lookahed for @is and @has + LANGTAG = /@(?!(?:is|has))(?:[a-zA-Z]+(?:-[a-zA-Z0-9]+)*)/u.freeze + # 19 + INTEGER = /[+-]?[0-9]+/u.freeze + # 20 + DECIMAL = /[+-]?(?:[0-9]*\.[0-9]+)/u.freeze + # 21 + DOUBLE = /[+-]?(?:[0-9]+\.[0-9]*#{EXPONENT}|\.?[0-9]+#{EXPONENT})/u.freeze + # 22 + STRING_LITERAL_SINGLE_QUOTE = /'(?:[^\'\\\n\r]|#{ECHAR}|#{UCHAR})*'/u.freeze + # 23 + STRING_LITERAL_QUOTE = /"(?:[^\"\\\n\r]|#{ECHAR}|#{UCHAR})*"/u.freeze + # 24 + STRING_LITERAL_LONG_SINGLE_QUOTE = /'''(?:(?:'|'')?(?:[^'\\]|#{ECHAR}|#{UCHAR}))*'''/um.freeze + # 25 + STRING_LITERAL_LONG_QUOTE = 
/"""(?:(?:"|"")?(?:[^"\\]|#{ECHAR}|#{UCHAR}))*"""/um.freeze + + # 28t + PREFIX = /@?prefix/ui.freeze + # 29t + BASE = /@?base/ui.freeze + QUICK_VAR_NAME = /\?#{PN_LOCAL}/.freeze + + # 161s + WS = /(?:\s|(?:#[^\n\r]*))+/um.freeze + # 162s + ANON = /\[\s*\]/u.freeze + + FORALL = /@forAll/u.freeze + end +end \ No newline at end of file diff --git a/lib/rdf/n3/vocab.rb b/lib/rdf/n3/vocab.rb index bf0e009..2917421 100644 --- a/lib/rdf/n3/vocab.rb +++ b/lib/rdf/n3/vocab.rb @@ -1,9 +1,37 @@ module RDF::N3 - class Crypto < RDF::Vocabulary("http://www.w3.org/2000/10/swap/crypto#"); end - class List < RDF::Vocabulary("http://www.w3.org/2000/10/swap/list#"); end - class Log < RDF::Vocabulary("http://www.w3.org/2000/10/swap/log#"); end - class Math < RDF::Vocabulary("http://www.w3.org/2000/10/swap/math#"); end - class Rei < RDF::Vocabulary("http://www.w3.org/2000/10/swap/reify#"); end - class Str < RDF::Vocabulary("http://www.w3.org/2000/10/swap/string#"); end - class Time < RDF::Vocabulary("http://www.w3.org/2000/10/swap/time#"); end + # @!parse + # # Crypto namespace + # class Crypto < RDF::Vocabulary; end + const_set("Crypto", Class.new(RDF::Vocabulary("http://www.w3.org/2000/10/swap/crypto#"))) + RDF::Vocabulary.register(:crypto, Crypto) + + # @!parse + # # Log namespace + # class Log < RDF::Vocabulary; end + const_set("Log", Class.new(RDF::Vocabulary("http://www.w3.org/2000/10/swap/log#"))) + RDF::Vocabulary.register(:log, Log) + + # @!parse + # # Math namespace + # class Math < RDF::Vocabulary; end + const_set("Math", Class.new(RDF::Vocabulary("http://www.w3.org/2000/10/swap/math#"))) + RDF::Vocabulary.register(:math, Math) + + # @!parse + # # Rei namespace + # class Rei < RDF::Vocabulary; end + const_set("Rei", Class.new(RDF::Vocabulary("http://www.w3.org/2000/10/swap/reify#"))) + RDF::Vocabulary.register(:rei, Rei) + + # @!parse + # # Str namespace + # class Str < RDF::Vocabulary; end + const_set("Str", Class.new(RDF::Vocabulary("http://www.w3.org/2000/10/swap/string#"))) + RDF::Vocabulary.register(:string, Str) + + # @!parse + # # Time namespace + # class Time < RDF::Vocabulary; end + const_set("Time", Class.new(RDF::Vocabulary("http://www.w3.org/2000/10/swap/time#"))) + RDF::Vocabulary.register(:time, Time) end diff --git a/lib/rdf/n3/writer.rb b/lib/rdf/n3/writer.rb index d8470ad..58a4360 100644 --- a/lib/rdf/n3/writer.rb +++ b/lib/rdf/n3/writer.rb @@ -49,7 +49,8 @@ module RDF::N3 class Writer < RDF::Writer format RDF::N3::Format include RDF::Util::Logger - QNAME = Meta::REGEXPS[:"http://www.w3.org/2000/10/swap/grammar/n3#qname"] + include Terminals + using Refinements # @return [RDF::Repository] Repository of statements serialized attr_accessor :repo @@ -57,6 +58,9 @@ class Writer < RDF::Writer # @return [RDF::Graph] Graph being serialized attr_accessor :graph + # @return [Array] formulae names + attr_accessor :formula_names + ## # N3 Writer options # @see http://www.rubydoc.info/github/ruby-rdf/rdf/RDF/Writer#options-class_method @@ -104,10 +108,13 @@ def self.options # @yield [writer] # @yieldparam [RDF::Writer] writer def initialize(output = $stdout, **options, &block) - @repo = RDF::Repository.new + @repo = RDF::N3::Repository.new @uri_to_pname = {} @uri_to_prefix = {} super do + if base_uri + @uri_to_prefix[base_uri.to_s.end_with?('#', '/') ? base_uri : RDF::URI("#{base_uri}#")] = nil + end reset if block_given? 
case block.arity @@ -152,12 +159,14 @@ def write_epilogue self.reset - log_debug {"\nserialize: repo: #{repo.size}"} + log_debug("\nserialize: repo:") {repo.size} preprocess start_document + @formula_names = repo.graph_names(unique: true) + with_graph(nil) do count = 0 order_subjects.each do |subject| @@ -168,10 +177,13 @@ def write_epilogue end # Output any formulae not already serialized using owl:sameAs - repo.graph_names.each do |graph_name| + formula_names.each do |graph_name| next if graph_done?(graph_name) - log_debug {"named graph(#{graph_name})"} + # Add graph_name to @formulae + @formulae[graph_name] = true + + log_debug {"formula(#{graph_name})"} @output.write("\n#{indent}") p_term(graph_name, :subject) @output.write(" ") @@ -200,7 +212,7 @@ def get_pname(resource) #log_debug {"get_pname(#{resource}), std?}"} pname = case - when @uri_to_pname.has_key?(uri) + when @uri_to_pname.key?(uri) return @uri_to_pname[uri] when u = @uri_to_prefix.keys.detect {|u| uri.index(u.to_s) == 0} # Use a defined prefix @@ -222,34 +234,31 @@ def get_pname(resource) # Make sure pname is a valid pname if pname - md = QNAME.match(pname) + md = PNAME_LN.match(pname) || PNAME_NS.match(pname) pname = nil unless md.to_s.length == pname.length end @uri_to_pname[uri] = pname - rescue Addressable::URI::InvalidURIError => e - raise RDF::WriterError, "Invalid URI #{resource.inspect}: #{e.message}" end # Take a hash from predicate uris to lists of values. # Sort the lists of values. Return a sorted list of properties. - # @param [Hash{String => Array}] properties A hash of Property to Resource mappings - # @return [Array}] Ordered list of properties. Uses predicate_order. + # @param [Hash{RDF::Term => Array}] properties A hash of Property to Resource mappings + # @return [Array}] Ordered list of properties. Uses predicate_order. def sort_properties(properties) # Make sorted list of properties prop_list = [] predicate_order.each do |prop| - next unless properties[prop.to_s] - prop_list << prop.to_s + next unless properties.key?(prop) + prop_list << prop end properties.keys.sort.each do |prop| - next if prop_list.include?(prop.to_s) - prop_list << prop.to_s + next if prop_list.include?(prop) + prop_list << prop end - log_debug {"sort_properties: #{prop_list.join(', ')}"} prop_list end @@ -267,7 +276,11 @@ def format_literal(literal, **options) when RDF::XSD.boolean, RDF::XSD.integer, RDF::XSD.decimal literal.canonicalize.to_s when RDF::XSD.double - literal.canonicalize.to_s.sub('E', 'e') # Favor lower case exponent + if literal.nan? || literal.infinite? + quoted(literal.value) + "^^#{format_uri(literal.datatype)}" + else + literal.canonicalize.to_s + end else text = quoted(literal.value) text << "@#{literal.language}" if literal.has_language? @@ -286,8 +299,8 @@ def format_literal(literal, **options) # @param [Hash{Symbol => Object}] options # @return [String] def format_uri(uri, **options) - md = uri.relativize(base_uri) - log_debug("relativize") {"#{uri.to_sxp} => #{md.inspect}"} if md != uri.to_s + md = uri == base_uri ? '' : uri.relativize(base_uri) + log_debug("relativize") {"#{uri.to_sxp} => <#{md.inspect}>"} if md != uri.to_s md != uri.to_s ? "<#{md}>" : (get_pname(uri) || "<#{uri}>") end @@ -298,7 +311,15 @@ def format_uri(uri, **options) # @param [Hash{Symbol => Object}] options # @return [String] def format_node(node, **options) - options[:unique_bnodes] ? 
node.to_unique_base : node.to_base + if node.id.match(/^([^_]+)_[^_]+_([^_]+)$/) + sn, seq = $1, $2.to_i + seq = nil if seq == 0 + "_:#{sn}#{seq}" + elsif options[:unique_bnodes] + node.to_unique_base + else + node.to_base + end end protected @@ -306,20 +327,21 @@ def format_node(node, **options) def start_document @output.write("@base <#{base_uri}> .\n") unless base_uri.to_s.empty? - log_debug {"start_document: prefixes #{prefixes.inspect}"} + log_debug("start_document: prefixes") { prefixes.inspect} prefixes.keys.sort_by(&:to_s).each do |prefix| @output.write("@prefix #{prefix}: <#{prefixes[prefix]}> .\n") end + # Universals and extentials at top-level unless @universals.empty? - log_debug {"start_document: universals #{@universals.inspect}"} + log_debug("start_document: universals") { @universals.inspect} terms = @universals.map {|v| format_uri(RDF::URI(v.name.to_s))} @output.write("@forAll #{terms.join(', ')} .\n") end unless @existentials.empty? - log_debug {"start_document: universals #{@existentials.inspect}"} - terms = @existentials.map {|v| format_uri(RDF::URI(v.name.to_s))} + log_debug("start_document: existentials") { @existentials.inspect} + terms = @existentials.map {|v| format_uri(RDF::URI(v.name.to_s.sub(/_ext$/, '')))} @output.write("@forSome #{terms.join(', ')} .\n") end end @@ -331,7 +353,17 @@ def top_classes; [RDF::RDFS.Class]; end # Defines order of predicates to to emit at begninning of a resource description. Defaults to # [rdf:type, rdfs:label, dc:title] # @return [Array] - def predicate_order; [RDF.type, RDF::RDFS.label, RDF::URI("http://purl.org/dc/terms/title")]; end + def predicate_order + [ + RDF.type, + RDF::RDFS.label, + RDF::RDFS.comment, + RDF::URI("http://purl.org/dc/terms/title"), + RDF::URI("http://purl.org/dc/terms/description"), + RDF::OWL.sameAs, + RDF::N3::Log.implies + ] + end # Order subjects for output. Override this to output subjects in another order. # @@ -342,7 +374,7 @@ def order_subjects subjects = [] # Start with base_uri - if base_uri && @subjects.keys.include?(base_uri) + if base_uri && @subjects.keys.select(&:uri?).include?(base_uri) subjects << base_uri seen[base_uri] = true end @@ -350,24 +382,26 @@ def order_subjects # Add distinguished classes top_classes.each do |class_uri| graph.query({predicate: RDF.type, object: class_uri}). - map {|st| st.subject}. - sort. - uniq. - each do |subject| - log_debug("order_subjects") {subject.to_sxp} - subjects << subject - seen[subject] = true - end + map {|st| st.subject}.sort.uniq.each do |subject| + log_debug("order_subjects") {subject.to_sxp} + subjects << subject + seen[subject] = true + end + end + + # Add formulae which are subjects in this graph + @formulae.each_key do |bn| + next unless @subjects.key?(bn) + subjects << bn + seen[bn] = true end # Mark as seen lists that are part of another list - @lists.values.map(&:statements). - flatten.each do |st| - seen[st.object] = true if @lists.has_key?(st.object) - end + @lists.values.flatten.each do |v| + seen[v] = true if @lists.key?(v) + end - # List elements which are bnodes should not be targets for top-level serialization - list_elements = @lists.values.map(&:to_a).flatten.select(&:node?).compact + list_elements = [] # Lists may be top-level elements # Sort subjects by resources over bnodes, ref_counts and the subject URI itself recursable = (@subjects.keys - list_elements). 
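# --- Editor's note: illustrative usage sketch, not part of the patch; the URIs and
# prefix mapping are placeholders. The writer is driven like any other RDF::Writer:
require 'rdf/n3'
graph = RDF::Graph.new
graph << RDF::Statement.new(RDF::URI("http://example.org/s"),
                            RDF::URI("http://example.org/p"),
                            RDF::Literal.new(3.14))
n3 = RDF::N3::Writer.buffer(prefixes: {ex: "http://example.org/"}) do |writer|
  graph.each_statement {|statement| writer << statement}
end
puts n3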
@@ -391,7 +425,7 @@ def preprocess @options[:prefixes] = {} # Will define actual used when matched repo.each {|statement| preprocess_statement(statement)} - vars = repo.enum_term.to_a.uniq.select {|r| r.is_a?(RDF::Query::Variable)} + vars = repo.enum_term.to_a.uniq.select {|r| r.is_a?(RDF::Query::Variable) && !r.to_s.end_with?('_quick')} @universals = vars.reject(&:existential?) @existentials = vars - @universals end @@ -400,7 +434,7 @@ def preprocess # prefixes. # @param [Statement] statement def preprocess_statement(statement) - #log_debug {"preprocess: #{statement.inspect}"} + #log_debug("preprocess") {statement.inspect} # Pre-fetch pnames, to fill prefixes get_pname(statement.subject) @@ -410,24 +444,13 @@ def preprocess_statement(statement) end # Perform graph-specific preprocessing - # @param [Statement] + # @param [Statement] statement def preprocess_graph_statement(statement) bump_reference(statement.object) # Count properties of this subject @subjects[statement.subject] ||= {} @subjects[statement.subject][statement.predicate] ||= 0 @subjects[statement.subject][statement.predicate] += 1 - - # Collect lists - if statement.predicate == RDF.first - l = RDF::List.new(subject: statement.subject, graph: graph) - @lists[statement.subject] = l if l.valid? - end - - if statement.object == RDF.nil || statement.subject == RDF.nil - # Add an entry for the list tail - @lists[RDF.nil] ||= RDF::List[] - end end # Returns indent string multiplied by the depth @@ -464,35 +487,33 @@ def quoted(string) private # Checks if l is a valid RDF list, i.e. no nodes have other properties. - def is_valid_list?(l) - #log_debug("is_valid_list?") {l.inspect} - return @lists[l] && @lists[l].valid? - end - - def do_list(l, position) - list = @lists[l] - log_debug("do_list") {list.inspect} - subject_done(RDF.nil) - index = 0 - list.each_statement do |st| - next unless st.predicate == RDF.first - log_debug {" list this: #{st.subject} first: #{st.object}[#{position}]"} - @output.write(" ") if index > 0 - path(st.object, position) - subject_done(st.subject) - position = :object - index += 1 - end + def collection?(l) + return @lists.key?(l) || l.list? end def collection(node, position) - return false if !is_valid_list?(node) - return false if position == :subject && ref_count(node) > 0 - return false if position == :object && prop_count(node) > 0 - #log_debug("collection") {"#{node.to_sxp}, #{position}"} + return false if !collection?(node) + log_debug("collection") do + "#{node.to_sxp}, " + + "pos: #{position}, " + + "rc: #{ref_count(node)}" + end @output.write("(") - log_depth {do_list(node, position)} + log_depth do + list = node.list? ? node : @lists[node] + log_debug("collection") {list.inspect} + subject_done(RDF.nil) + subject_done(node) + index = 0 + list.each do |li| + log_debug("(list first)") {"#{li}[#{position}]"} + @output.write(" ") if index > 0 + path(li, :object) + subject_done(li) + index += 1 + end + end @output.write(')') end @@ -500,7 +521,11 @@ def collection(node, position) def p_term(resource, position) #log_debug("p_term") {"#{resource.to_sxp}, #{position}"} l = if resource.is_a?(RDF::Query::Variable) - format_term(RDF::URI(resource.name.to_s.sub(/^\$/, ''))) + if resource.to_s.end_with?('_quick') + '?' 
+ RDF::URI(resource.name).fragment.sub(/_quick$/, '') + else + format_term(RDF::URI(resource.name.to_s.sub(/_ext$/, ''))) + end elsif resource == RDF.nil "()" else @@ -515,9 +540,9 @@ def path(resource, position) log_debug("path") do "#{resource.to_sxp}, " + "pos: #{position}, " + - "{}?: #{formula?(resource, position)}, " + - "()?: #{is_valid_list?(resource)}, " + - "[]?: #{blankNodePropertyList?(resource, position)}, " + + "{}?: #{formula?(resource, position).inspect}, " + + "()?: #{collection?(resource).inspect}, " + + "[]?: #{blankNodePropertyList?(resource, position).inspect}, " + "rc: #{ref_count(resource)}" end raise RDF::WriterError, "Cannot serialize resource '#{resource}'" unless @@ -537,7 +562,7 @@ def predicate(resource) when RDF::N3::Log.implies @output.write("=>") else - path(resource, :predicate) + log_depth {path(resource, :predicate)} end end @@ -546,13 +571,15 @@ def objectList(objects) log_debug("objectList") {objects.inspect} return if objects.empty? - objects.each_with_index do |obj, i| - if i > 0 && (formula?(obj, :object) || blankNodePropertyList?(obj, :object)) - @output.write ", " - elsif i > 0 - @output.write ",\n#{indent(4)}" + log_depth do + objects.each_with_index do |obj, i| + if i > 0 && (formula?(obj, :object) || blankNodePropertyList?(obj, :object)) + @output.write ", " + elsif i > 0 + @output.write ",\n#{indent(4)}" + end + path(obj, :object) end - path(obj, :object) end end @@ -560,29 +587,24 @@ def objectList(objects) # @return [Integer] the number of properties serialized def predicateObjectList(subject, from_bpl = false) properties = {} - if subject.variable? - # Can't query on variable - @graph.enum_statement.select {|s| s.subject.equal?(subject)}.each do |st| - (properties[st.predicate.to_s] ||= []) << st.object - end - else - @graph.query({subject: subject}) do |st| - (properties[st.predicate.to_s] ||= []) << st.object - end + @graph.enum_statement.select {|s| s.subject.sameTerm?(subject)}.each do |st| + (properties[st.predicate] ||= []) << st.object end prop_list = sort_properties(properties) - prop_list -= [RDF.first.to_s, RDF.rest.to_s] if @lists.include?(subject) - log_debug("predicateObjectList") {prop_list.inspect} + prop_list -= [RDF.first, RDF.rest] if @lists.key?(subject) + log_debug("predicateObjectList") { "subject: #{subject.to_sxp}, properties: #{prop_list.join(', ')}" } return 0 if prop_list.empty? @output.write("\n#{indent(2)}") if properties.keys.length > 1 && from_bpl - prop_list.each_with_index do |prop, i| - begin - @output.write(";\n#{indent(2)}") if i > 0 - predicate(RDF::URI.intern(prop)) - @output.write(" ") - objectList(properties[prop]) + log_depth do + prop_list.each_with_index do |prop, i| + begin + @output.write(";\n#{indent(2)}") if i > 0 + predicate(prop) + @output.write(" ") + objectList(properties[prop]) + end end end properties.keys.length @@ -592,10 +614,9 @@ def predicateObjectList(subject, from_bpl = false) def blankNodePropertyList?(resource, position) resource.node? && !formula?(resource, position) && - !is_valid_list?(resource) && + !collection?(resource) && (!is_done?(resource) || position == :subject) && ref_count(resource) == (position == :object ? 1 : 0) && - resource_in_single_graph?(resource) && !repo.has_graph?(resource) end @@ -604,20 +625,15 @@ def blankNodePropertyList(resource, position) log_debug("blankNodePropertyList") {resource.to_sxp} subject_done(resource) - @output.write(position == :subject ? "\n#{indent}[" : '[') + @output.write((position == :subject ? 
"\n#{indent}[" : '[')) num_props = log_depth {predicateObjectList(resource, true)} - @output.write((num_props > 1 ? "\n#{indent(2)}" : "") + (position == :object ? ']' : '] .')) + @output.write((num_props > 1 ? "\n#{indent(2)}" : "") + (position == :subject ? '] .' : ']')) true end # Can subject be represented as a formula? def formula?(resource, position) - (resource.node? || position == :graph_name) && - repo.has_graph?(resource) && - !is_valid_list?(resource) && - (!is_done?(resource) || position == :subject) && - ref_count(resource) == (position == :object ? 1 : 0) && - resource_in_single_graph?(resource) + !!@formulae[resource] end def formula(resource, position) @@ -626,9 +642,9 @@ def formula(resource, position) log_debug("formula") {resource.to_sxp} subject_done(resource) @output.write('{') + count = 0 log_depth do with_graph(resource) do - count = 0 order_subjects.each do |subject| unless is_done?(subject) statement(subject, count) @@ -637,7 +653,7 @@ def formula(resource, position) end end end - @output.write((graph.count > 1 ? "\n#{indent}" : "") + '}') + @output.write((count > 0 ? "#{indent}" : "") + '}') true end @@ -647,25 +663,22 @@ def triples(subject) path(subject, :subject) @output.write(" ") num_props = predicateObjectList(subject) - @output.write("#{num_props > 0 ? ' ' : ''}.") + @output.puts("#{num_props > 0 ? ' ' : ''}.") true end def statement(subject, count) - log_debug("statement") {"#{subject.to_sxp}, bnodePL?: #{blankNodePropertyList?(subject, :subject)}"} + log_debug("statement") do + "#{subject.to_sxp}, " + + "{}?: #{formula?(subject, :subject).inspect}, " + + "()?: #{collection?(subject).inspect}, " + + "[]?: #{blankNodePropertyList?(subject, :subject).inspect}, " + end subject_done(subject) blankNodePropertyList(subject, :subject) || triples(subject) @output.puts if count > 0 || graph.graph_name end - # Return the number of statements having this resource as a subject other than for list properties - # @return [Integer] - def prop_count(subject) - @subjects.fetch(subject, {}). - reject {|k, v| [RDF.type, RDF.first, RDF.rest].include?(k)}. - values.reduce(:+) || 0 - end - # Return the number of times this node has been referenced in the object position # @return [Integer] def ref_count(node) @@ -697,19 +710,6 @@ def graph_done(graph_name) @graphs[graph_name] = true end - def resource_in_single_graph?(resource) - if resource.variable? - graph_names = @repo. - enum_statement. - select {|st| st.subject.equal?(resource) || st.object.equal?(resource)}. - map(&:graph_name) - else - graph_names = @repo.query({subject: resource}).map(&:graph_name) - graph_names += @repo.query({object: resource}).map(&:graph_name) - end - graph_names.uniq.length <= 1 - end - # Process a graph projection def with_graph(graph_name) old_lists, @lists = @lists, {} @@ -717,21 +717,81 @@ def with_graph(graph_name) old_serialized, @serialized = @serialized, {} old_subjects, @subjects = @subjects, {} old_graph, @graph = @graph, repo.project_graph(graph_name) + old_formulae, @formulae = @formulae, {} graph_done(graph_name) - graph.each {|statement| preprocess_graph_statement(statement)} + lists = {} + graph.each do |statement| + preprocess_graph_statement(statement) + [statement.subject, statement.object].each do |resource| + @formulae[resource] = true if + resource.node? && + (formula_names.include?(resource) || resource.id.start_with?('_form_')) + + # First-class list may have members which are formulae, and need reference counts + if resource.list? 
+ resource.each_descendant do |term| + bump_reference(term) + @formulae[term] = true if + term.node? && + (formula_names.include?(term) || term.id.start_with?('_form_')) + end + end + end + + # Collect list elements + if [RDF.first, RDF.rest].include?(statement.predicate) && statement.subject.node? + lists[statement.subject] ||= {} + lists[statement.subject][statement.predicate] = statement.object + end + end + + # Remove list entries after head with more than two properties (other than rdf:type) + rests = lists.values.map {|props| props[RDF.rest]} + + # Remove non-head lists that have too many properties + rests.select do |bn| + pc = 0 + @subjects.fetch(bn, {}).each do |pred, count| + next if pred == RDF.type + pc += count + end + lists.delete(bn) if pc > 2 + end + + # Values for this list element, recursive + def list_values(bn, lists) + raise "no list" unless lists.has_key?(bn) + first, rest = lists[bn][RDF.first], lists[bn][RDF.rest] + (rest == RDF.nil ? [] : list_values(rest, lists)).unshift(first) + rescue + lists.delete(bn) + raise $! + end + + # Create value arrays for each entry + lists.each do |bn, props| + begin + @lists[bn] = list_values(bn, lists) + rescue + # Skip this list element, if it raises an exception + lists.delete(bn) + end + end + + # Mark all remaining rests done + rests.each {|bn| subject_done(bn) if lists.include?(bn)} - # Remove lists that are referenced and have non-list properties; - # these are legal, but can't be serialized as lists - @lists.reject! do |node, list| - ref_count(node) > 0 && prop_count(node) > 0 || - list.subjects.any? {|elt| !resource_in_single_graph?(elt)} + # Remove entries that are referenced as rdf:rest of some entry + lists.each do |bn, props| + @lists.delete(props[RDF.rest]) end + # Record nodes in subject or object yield ensure - @graph, @lists, @references, @serialized, @subjects = old_graph, old_lists, old_references, old_serialized, old_subjects + @graph, @lists, @references, @serialized, @subjects, @formulae = old_graph, old_lists, old_references, old_serialized, old_subjects, old_formulae end end end diff --git a/rdf-n3.gemspec b/rdf-n3.gemspec index 73e125d..21b65f8 100755 --- a/rdf-n3.gemspec +++ b/rdf-n3.gemspec @@ -15,24 +15,25 @@ Gem::Specification.new do |gem| gem.email = 'public-rdf-ruby@w3.org' gem.platform = Gem::Platform::RUBY - gem.files = %w(README.md History.markdown AUTHORS VERSION UNLICENSE) + Dir.glob('lib/**/*.rb') + gem.files = %w(README.md VERSION UNLICENSE) + Dir.glob('lib/**/*.rb') gem.require_paths = %w(lib) gem.required_ruby_version = '>= 2.4' gem.requirements = [] - gem.add_dependency 'rdf', '~> 3.1' - gem.add_dependency 'sparql', '~> 3.1' + gem.add_dependency 'ebnf', '~> 2.1' + gem.add_dependency 'rdf', '~> 3.1', '>= 3.1.8' + gem.add_dependency 'sparql', '~> 3.1', '>= 3.1.4' gem.add_runtime_dependency 'sxp', '~> 1.1' gem.add_development_dependency 'json-ld', '~> 3.1' - gem.add_development_dependency 'rspec', '~> 3.9' - gem.add_development_dependency 'rspec-its', '~> 1.3' gem.add_development_dependency 'rdf-spec', '~> 3.1' - gem.add_development_dependency 'rdf-isomorphic', '~> 3.0' + gem.add_development_dependency 'rdf-isomorphic', '~> 3.1' gem.add_development_dependency 'rdf-trig', '~> 3.1' gem.add_development_dependency 'rdf-vocab', '~> 3.1' - gem.add_development_dependency 'yard' , '~> 0.9.20' + gem.add_development_dependency 'rspec', '~> 3.10' + gem.add_development_dependency 'rspec-its', '~> 1.3' + gem.add_development_dependency 'yard' , '~> 0.9' gem.post_install_message = nil end diff --git 
a/script/build_meta b/script/build_meta deleted file mode 100755 index 954ac87..0000000 --- a/script/build_meta +++ /dev/null @@ -1,249 +0,0 @@ -#!/usr/bin/env ruby -# build_meta --- generator of parser tables for SPARQL::Grammar::Parser -# Derived from: -# http://www.w3.org/2000/10/swap/grammar/predictiveParser.py -# - predictiveParser.py, Tim Berners-Lee, 2004 -# -require 'rubygems' -$:.unshift(File.expand_path(File.join(File.dirname(__FILE__), "..", 'lib'))) -require 'rdf/n3' -require 'getoptlong' - -# Build rdf/n3/parser/meta.rb from http://www.w3.org/2000/10/swap/grammar/n3-selectors.n3 - -class BNF < RDF::Vocabulary("http://www.w3.org/2000/10/swap/grammar/bnf#"); end -class REGEX < RDF::Vocabulary("http://www.w3.org/2000/10/swap/grammar/regex#"); end -class N3 < RDF::Vocabulary("http://www.w3.org/2000/10/swap/grammar/n3#"); end - -class PredictiveParser - attr_accessor :already, :agenda, :errors, :literalTerminals, :branchTable, :tokenRegexps - attr_accessor :graph - - def initialize - @already = [] - @agenda = [] - @errors = [] - @literalTerminals = {} - @branchTable = {} - @tokenRegexps = {} - end - - def parse(file) - progress("Loading " + file) - @graph = RDF::Graph.load(file) - progress("Loaded #{@graph.count} statements.") - end - - def recordError(str) - errors << str - "##### ERROR: #{str}" - end - - def progress(str); puts(str); end - def chatty(str); progress(str) if $verbose; end - - def runProduction(lhs) - doProduction(lhs) - while !@agenda.empty? - x = @agenda.shift - @already << x - doProduction(x) - end - - if !@errors.empty? - progress("###### FAILED with #{errors.length} errors.") - @errors.each {|s| progress(" #{s}")} - exit(-2) - else - progress("Ok for predictive parsing") - end - end - - # Generate branch tables for one production - def doProduction(lhs) - if lhs == BNF.void - progress("\nvoid") - return - end - if lhs == BNF.eof - progress( "\nEOF") - return - end - if lhs.is_a?(RDF::Literal) - literalTerminals[lhs.value()] = 1 - return - end - - branchDict = {} - - rhs = graph.first_object(subject: lhs, predicate: BNF.matches) - if rhs - chatty("\nToken #{lhs} matches regexp #{rhs}") - tokenRegexps[lhs] = rhs.value - - cc = graph.query({subject: lhs, predicate: BNF.canStartWith}) - progress(recordError("No record of what token #{lhs} can start with")) if cc.empty? - cc.each {|statement| chatty(" Can start with: #{statement.object}")} - return - end - - rhs = graph.first_object(subject: lhs, predicate: BNF.mustBeOneSequence) - unless rhs - progress(recordError("No definition of #{lhs}")) - # raise RuntimeError("No definition of %s in\n %s" %(`lhs`, `g`)) - return - end - - options = rhs - progress("\nProduction #{lhs} :: #{options}") - graph.query({subject: lhs, predicate: BNF.canPrecede}) do |statement| - chatty(" Can precede '#{statement.object}'") - end - - graph.query({subject: lhs, predicate: BNF.branch}) do |statement| - branch = statement.object - sequence = graph.first_object(subject: statement.object, predicate: BNF.sequence) - option = RDF::List.new(subject: sequence, graph: graph).to_a - progress(" option: #{option}") - - option.each do |part| - agenda << part unless already.include?(part) || agenda.include?(part) - end - - conditions = graph.query({subject: branch, predicate: BNF.condition}).map(&:object) - if conditions.empty? - progress(recordError("NO SELECTOR for #{lhs} option #{option}")) - if option.empty? 
- # Void case - the tricky one - graph.pattern(subject: lhs, predicate: BNF.canPrecede) do |st| - progress(" Can precede #{st.object}") - end - end - end - - progress(" Conditions: #{conditions.to_a.map(&:to_s)}") - conditions.each do |str1| - if branchDict.has_key?(str1) - progress( - "Conflict: #{str1} is also the condition for #{branchDict[str1]}") - end - branchDict[str1] = option - end - end - - branchDict.keys.each do |str1| - branchDict.keys.each do |str2| - s1, s2 = str1.to_s, str2.to_s - if (s1.index(s2) == 0 || s2.index(s1) == 0) && branchDict[str1] != branchDict[str2] - progress("WARNING: for #{lhs}, #{str1} indicates #{branchDict[str1]}, but #{str2} indicates #{branchDict[str2]}") - end - end - end - - branchTable[lhs] = branchDict - end - - def litOrNot(value) - (value.is_a?(RDF::Literal) ? "" : ":") + value.to_s.dump - end - - def outputBranchTable(io, indent = 0) - ind0 = ' ' * indent - ind1 = ind0 + ' ' - ind2 = ind1 + ' ' - ind3 = ind2 + ' ' - io.puts "#{ind0}BRANCHES = {" - branchTable.keys.sort_by(&:to_s).each do |prod| - # Special case double, integer, and decimal to output just a numericliteral, due to a parser conflict - next if prod.to_s =~ /numericliteral/ - io.puts "#{ind1}#{litOrNot(prod)} => {" - branchTable[prod].keys.sort_by(&:to_s).each do |term| - list = branchTable[prod][term].map {|t2| litOrNot(t2)}.join(",\n#{ind3}") - io.puts "#{ind2}#{litOrNot(term)} => [#{list}]," - end - io.puts "#{ind1}}," - end - io.puts "#{ind0}}\n" - end - - def outputRegexpTable(io, indent = 0) - ind0 = ' ' * indent - ind1 = ind0 + ' ' - io.puts "#{ind0}REGEXPS = {" - tokenRegexps.keys.sort_by(&:to_s).each do |prod| - # Special case double, integer, and decimal to output just a numericliteral, due to a parser conflict - next if prod.to_s =~ /(integer|double|decimal)/ - io.puts "#{ind1}#{litOrNot(prod)} => Regexp.compile(" + - case prod.to_s - when /barename/ then %q(%(^[#{BARENAME_START}][#{BARENAME_TAIL}]*)) - when /explicituri/ then %q("^<[^>]*>") - when /langcode/ then %q("^[a-zA-Z]+(-[a-zA-Z0-9]+)*") - when /prefix/ then %q(%(^([#{BARENAME_START}][#{BARENAME_TAIL}]*)?:)) - when /qname/ then %q(%(^(([#{BARENAME_START}][#{BARENAME_TAIL}]*)?:)?([#{BARENAME_START}][#{BARENAME_TAIL}]*)?)) - when /variable/ then %q(%(^\\\\?[#{BARENAME_START}][#{BARENAME_TAIL}]*)) - else tokenRegexps[prod].dump - end + ")," - end - - io.puts "\n#{ind1}# Hack to replace integer|double|decimal with numericliteral" - io.puts "#{ind1}#{litOrNot(N3.numericliteral)} => Regexp.compile(" + %q(%(^[-+]?[0-9]+(\\\\.[0-9]+)?(e[-+]?[0-9]+)?)) + ")" - io.puts "#{ind0}}\n" - end -end - -$verbose = false -$debug = false -grammarFile = File.expand_path(File.join(File.dirname(__FILE__), "../lib/rdf/n3/reader/n3-selectors.n3")) -start = N3.document -output = STDOUT - -opts = GetoptLong.new( - ["--debug", GetoptLong::NO_ARGUMENT], - ["--verbose", GetoptLong::NO_ARGUMENT], - ["--grammar", GetoptLong::REQUIRED_ARGUMENT], - ["--start", GetoptLong::REQUIRED_ARGUMENT], - ["--output", "-o", GetoptLong::REQUIRED_ARGUMENT], - ["--help", "-?", GetoptLong::NO_ARGUMENT] -) -opts.each do |opt, arg| - case opt - when '--verbose' then $verbose = true - when '--debug' then $debug = true - when '--grammar' then grammarFile = arg - when '--as' then parseAs = arg - when '--output' then output = File.open(arg, "w") - when '--help' - puts %(Usage: build_meta --grammar=file --as=uri [--output=file] - --grammar=file This is the RDF augmented grammar - --start=uri This is the URI of the production as which the document - is to be parsed - 
--output=file Where to save output -) - exit(0) - end -end - -pp = PredictiveParser.new - -pp.parse(grammarFile) -pp.runProduction(start) - -unless output == STDOUT - output.puts "# This file is automatically generated by #{__FILE__}" - output.puts "# Branch and Regexp tables derived from #{grammarFile}" - output.puts "module RDF::N3::Meta" -end -pp.outputBranchTable(output, 1) -output.puts %q( - if RUBY_VERSION >= "1.9.0" - BARENAME_START = "A-Z_a-z\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u02ff\u0370-\u037d\u037f-\u1fff\u200c-\u200d\u2070-\u218f\u2c00-\u2fef\u3001-\ud7ff\uf900-\ufdcf\ufdf0-\ufffd\u{10000}-\u{effff}" - BARENAME_TAIL = "0-9#{BARENAME_START}\u00b7\u0300-\u036f\u203f-\u2040\\\\-" - else - BARENAME_START = "A-Z_a-z\xc0-\xd6\xd8-\xf6\xf8-\xff" - BARENAME_TAIL = "0-9#{BARENAME_START}\xb7\\\\-" - end -) -pp.outputRegexpTable(output, 1) -unless output == STDOUT - output.puts "end" -end diff --git a/script/parse b/script/parse index cedfa88..a803aeb 100755 --- a/script/parse +++ b/script/parse @@ -11,6 +11,8 @@ require 'open-uri' def run(input, **options) require 'profiler' if options[:profile] + parser_options = options[:parser_options] + parser_options[:base_uri] ||= File.expand_path(input.path) if input.respond_to?(:path) reader_class = RDF::Reader.for(options[:input_format].to_sym) raise "Reader not found for #{options[:input_format]}" unless reader_class @@ -18,12 +20,13 @@ def run(input, **options) start = Time.new num = 0 Profiler__::start_profile if options[:profile] - if options[:think] + if options[:think] || options[:rules] + STDERR.puts "Reason" if $verbose # Parse into a new reasoner and evaluate - reader_class.new(input, options[:parser_options].merge(logger: nil)) do |reader| - reasoner = RDF::N3::Reasoner.new(reader, options[:parser_options]) - reasoner.reason!(options) - repo = RDF::Repository.new + reader_class.new(input, **parser_options.merge(logger: nil)) do |reader| + repo = RDF::N3::Repository.new + reasoner = RDF::N3::Reasoner.new(reader, **parser_options) + reasoner.reason!(**options) if options[:conclusions] repo << reasoner.conclusions elsif options[:data] @@ -32,10 +35,19 @@ def run(input, **options) repo << reasoner end num = repo.count - options[:output].puts repo.dump(options[:output_format], prefixes: reader.prefixes, standard_prefixes: true, logger: options[:logger]) + if options[:strings] + options[:output].puts reasoner.strings + else + options[:output].puts repo.dump(options[:output_format], + prefixes: reader.prefixes, + base_uri: reader.base_uri, + standard_prefixes: true, + logger: options[:logger]) + end end elsif options[:output_format] == :ntriples || options[:quiet] - reader_class.new(input, options[:parser_options]).each do |statement| + STDERR.puts "Parse nt/quiet" if $verbose + reader_class.new(input, **parser_options).each do |statement| num += 1 if options[:errors] && statement.invalid? 
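+          # With --errors, statements that fail validation are reported as they stream by,
+          # rather than being collected into a repository first.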
$stderr.puts "Invalid statement at #{r.lineno}: #{statement.inspect}" @@ -46,20 +58,32 @@ def run(input, **options) end end elsif options[:output_format] == :sxp - reader_class.new(input, options[:parser_options]) do |reader| + STDERR.puts "Parse to SXP" if $verbose + reader_class.new(input, **parser_options) do |reader| reasoner = RDF::N3::Reasoner.new(reader) SXP::Generator.print(reasoner.to_sxp_bin) end elsif options[:output_format] == :inspect - reader_class.new(input, options[:parser_options]).each do |statement| + STDERR.puts "Parse to inspect" if $verbose + reader_class.new(input, **parser_options).each do |statement| num += 1 options[:output].puts statement.inspect end else - reader = reader_class.new(input, options[:parser_options]) - repo = RDF::Repository.new << reader + STDERR.puts "Parse to #{options[:output_format]}" if $verbose + reader = reader_class.new(input, **parser_options) + repo = [].extend(RDF::Enumerable, RDF::Queryable) + reader.each_statement {|st| repo << st} num = repo.count - options[:output].puts repo.dump(options[:output_format], prefixes: reader.prefixes, standard_prefixes: true, logger: options[:logger]) + if options[:output_format] == :n3 + # Extra debugging + options[:logger].debug SXP::Generator.string(repo.to_sxp_bin).strip + end + options[:output].puts repo.dump(options[:output_format], + prefixes: reader.prefixes, + base_uri: reader.base_uri, + standard_prefixes: true, + logger: options[:logger]) end if options[:profile] Profiler__::stop_profile @@ -68,29 +92,31 @@ def run(input, **options) puts secs = Time.new - start puts "Parsed #{num} statements in #{secs} seconds @ #{num/secs} statements/second." +rescue RDF::ReaderError => e + STDERR.puts "Backtrace: " + e.backtrace.join("\n ") if $verbose + exit(1) rescue Exception => e - fname = input.respond_to?(:path) ? 
input.path : "-stdin-" - STDERR.puts("Error in #{fname}: #{e.message}") + STDERR.puts "Error: #{e}" STDERR.puts "Backtrace: " + e.backtrace.join("\n ") - raise e + exit(1) end logger = Logger.new(STDERR) logger.level = Logger::WARN -logger.formatter = lambda {|severity, datetime, progname, msg| "#{severity}: #{msg}\n"} +logger.formatter = lambda {|severity, datetime, progname, msg| "%5s %s\n" % [severity, msg]} parser_options = { - base_uri: "http://example.com", + list_terms: true, logger: logger, validate: false, } options = { - parser_options: parser_options, + input_format: :n3, logger: logger, output: STDOUT, output_format: :n3, - input_format: :n3, + parser_options: parser_options, } input = nil @@ -106,14 +132,16 @@ OPT_ARGS = [ ["--help", "-?", GetoptLong::NO_ARGUMENT, "print this message"], ["--input-format", GetoptLong::REQUIRED_ARGUMENT, "Format of the input file, defaults to n3"], ["--info", GetoptLong::NO_ARGUMENT, "Show progress on execution"], + ["--no-list-terms", GetoptLong::NO_ARGUMENT, "Use first/rest chain for lists"], ["--output", "-o", GetoptLong::REQUIRED_ARGUMENT, "Save output to file"], ["--profile", GetoptLong::NO_ARGUMENT, "Show an execution profile"], ["--quiet", GetoptLong::NO_ARGUMENT, "Do not show parser output"], ["--rules", GetoptLong::NO_ARGUMENT, "Run rules adding to the store"], + ["--strings", GetoptLong::NO_ARGUMENT, "Dump :s to stdout ordered by :k whereever { :k log:outputString :s }"], ["--think", GetoptLong::NO_ARGUMENT, "Run rules until until no more triples generated"], ["--uri", GetoptLong::REQUIRED_ARGUMENT, "Default base URI"], ["--validate", GetoptLong::NO_ARGUMENT, "Run parser in strict validation mode"], - #["--verbose", GetoptLong::NO_ARGUMENT, "Verbose output"], + ["--verbose", GetoptLong::NO_ARGUMENT, "Verbose output"], ] def usage @@ -146,20 +174,24 @@ opts.each do |opt, arg| when "--data" then options[:data] = true when '--debug' then logger.level = Logger::DEBUG when '--errors' then options[:errors] = true - when '--execute' then input = arg + when '--execute' + options[:base_uri] = "http://example.com/", + input = arg when '--format' then options[:output_format] = arg.to_sym when "--help" then usage() when '--info' then logger.level = Logger::INFO when '--input-format' then options[:input_format] = arg.to_sym + when '--no-list-terms' then parser_options[:list_terms] = false when '--output' then options[:output] = File.open(arg, "w") when '--profile' then options[:profile] = true when '--quiet' options[:quiet] = true logger.level = Logger::FATAL when '--rules' then options[:rules] = true + when '--strings' then options[:strings] = true when '--think' then options[:think] = true when '--uri' then parser_options[:base_uri] = arg - when '--validate' then parser_options[:debug] ||= 1 + when '--validate' then parser_options[:validate] = true when '--verbose' then $verbose = true end end diff --git a/script/tc b/script/tc new file mode 100755 index 0000000..b69f7e4 --- /dev/null +++ b/script/tc @@ -0,0 +1,278 @@ +#!/usr/bin/env ruby +require 'rubygems' +$:.unshift(File.expand_path("../../lib", __FILE__)) +require "bundler/setup" +require 'logger' +require 'rdf/turtle' +require 'rdf/isomorphic' +require File.expand_path("../../spec/spec_helper", __FILE__) +require File.expand_path("../../spec/suite_helper", __FILE__) +require 'getoptlong' + +ASSERTOR = "https://greggkellogg.net/foaf#me" +RUN_TIME = Time.now + +def earl_preamble(**options) + options[:output].write File.read(File.expand_path("../../etc/doap#{'-ntriples' if 
options[:ntriples]}.ttl", __FILE__)) + options[:output].puts %( +<> foaf:primaryTopic ; + dc:issued "#{RUN_TIME.xmlschema}"^^xsd:dateTime ; + foaf:maker <#{ASSERTOR}> . + +<#{ASSERTOR}> a foaf:Person, earl:Assertor; + foaf:name "Gregg Kellogg"; + foaf:title "Implementor"; + foaf:homepage . +) + + options[:output].puts %( + + doap:release . + + a doap:Version; + doap:name "rdf-n3-#{RDF::N3::VERSION}"; + doap:created "#{File.mtime(File.expand_path('../../VERSION', __FILE__)).strftime('%Y-%m-%d')}"^^xsd:date; + doap:revision "#{RDF::N3::VERSION}" . +) +end + +def run_tc(man, tc, **options) + case options[:type] + when :parser + return if tc.reason? + when :reasoner + return unless tc.reason? + end + + STDERR.write "test #{tc.name} " + + if options[:verbose] + STDERR.puts "\nTestCase: #{tc.inspect}" + STDERR.puts "\nInput:\n" + tc.input + STDERR.puts "\nExpected:\n" + tc.expected if tc.result && tc.positive_test? + end + + return if tc.approval == "rdft:Rejected" + + logger = options[:live] ? Logger.new(STDERR) : RDF::Spec.logger + logger.level = options[:level] + logger.formatter = lambda {|severity, datetime, progname, msg| "%5s %s\n" % [severity, msg]} + + start = Time.now + + begin + STDERR.puts "open #{tc.action}" if options[:verbose] + options = { + validate: true, + logger: logger + }.merge(options) + + reader_options = options.dup + reader_options[:logger] = false if tc.reason? + reader = RDF::N3::Reader.open(tc.action, **reader_options) + + graph = RDF::N3::Repository.new + result = nil + + if !options[:slow] && tc.slow? + result = "untested" + elsif tc.positive_test? + begin + graph << reader + rescue Exception => e + if options[:verbose] + STDERR.puts "Unexpected exception: #{e.inspect}\n#{e.backtrace.join("\n")}" + end + result = "failed" + end + else + begin + graph << reader + STDERR.puts "Expected exception" if options[:verbose] + result = "failed" + rescue RDF::ReaderError + result = "passed" + end + end + + secs = Time.new - start + + if tc.evaluate? && result.nil? + begin + result_repo = RDF::N3::Repository.load(tc.result) + result = graph.isomorphic_with?(result_repo) ? "passed" : "failed" + rescue Exception => e + if options[:verbose] + STDERR.puts "Unexpected exception: #{e.inspect}\n#{e.backtrace.join("\n")}" + end + result = "failed" + end + elsif tc.reason? && result.nil? + reasoner = RDF::N3::Reasoner.new(graph, **options) + + repo = RDF::N3::Repository.new + + begin + reasoner.execute(logger: logger, think: !!tc.options['think']) + if tc.options["conclusions"] + repo << reasoner.conclusions + elsif tc.options["data"] + repo << reasoner.data + else + repo << reasoner + end + rescue Exception => e + if options[:verbose] + STDERR.puts "Unexpected exception: #{e.inspect}\n#{e.backtrace.join("\n")}" + end + result = "failed" + end + if options[:verbose] + if tc.options["strings"] + STDERR.puts "\nResult: #{reasoner.strings}" + else + STDERR.puts "\nResult: #{repo.dump(:n3, base_uri: tc.base, standard_prefixes: true)}" + end + end + + if tc.options["strings"] + result = reasoner.strings == tc.expected + else + result_repo = RDF::N3::Repository.load(tc.result) + + # Check against expanded triples from repo + expanded_repo = RDF::Repository.new do |r| + repo.each_expanded_statement do |st| + r << st + end + end + + result = expanded_repo.isomorphic_with?(result_repo) ? 
"passed" : "failed" + end + else + result ||= "passed" + end + + rescue Exception => e + STDERR.puts "#{"exception:" unless options[:quiet]}: #{e}" + return if options[:quiet] + STDERR.puts "Backtrace: " + e.backtrace.join("\n ") if $verbose + end + + STDERR.puts options[:logger] if options[:verbose] && !options[:live] + + if options[:earl] + options[:output].puts %{ +[ a earl:Assertion; + earl:assertedBy <#{ASSERTOR}>; + earl:subject ; + earl:test <#{tc.id}>; + earl:result [ + a earl:TestResult; + earl:outcome earl:#{result}; + dc:date "#{RUN_TIME.xmlschema}"^^xsd:dateTime]; + earl:mode earl:automatic ] . +} + end + + options[:results][result] ||= 0 + options[:results][result] += 1 + + STDERR.puts "#{"test result:" unless options[:quiet]} #{result} #{"(#{secs} seconds)" unless options[:quiet] || secs < 1}." +end + +options = { + level: Logger::WARN, + list_terms: true, + output: STDOUT, + results: {}, + slow: true, # Run slow tests by default + type: :all +} + +OPT_ARGS = [ + ["--earl", GetoptLong::NO_ARGUMENT, "Generate EARL report"], + ["--debug", GetoptLong::NO_ARGUMENT, "Debugging output"], + ["--help", "-?", GetoptLong::NO_ARGUMENT, "print this message"], + ["--info", GetoptLong::NO_ARGUMENT, "Show progress on execution"], + ["--live", GetoptLong::NO_ARGUMENT, "Show live parsing results, not buffered"], + ["--output", "-o", GetoptLong::REQUIRED_ARGUMENT, "Output to specified file"], + ["--quiet", "-q", GetoptLong::NO_ARGUMENT, "Minimal output"], + ["--skip-slow", "-s", GetoptLong::NO_ARGUMENT, "Avoid files taking too much time"], + ["--type", GetoptLong::REQUIRED_ARGUMENT, "Test type (`parser`, `reasoner`, or `all`)"], + ["--validate", GetoptLong::NO_ARGUMENT, "Validate input"], + ["--verbose", "-v", GetoptLong::NO_ARGUMENT, "Verbose output"], +] + +def usage + STDERR.puts %{ + n3 version #{RDF::N3::VERSION} + Run N3 tests. + + Usage: #{$0} [options] [test-number ...] + }.gsub(/^ /, '') + width = OPT_ARGS.map do |o| + l = o.first.length + l += o[1].length + 2 if o[1].is_a?(String) + l + end.max + OPT_ARGS.each do |o| + s = " %-*s " % [width, (o[1].is_a?(String) ? "#{o[0,2].join(', ')}" : o[0])] + s += o.last + STDERR.puts s + end + exit(1) +end + +opts = GetoptLong.new(*OPT_ARGS.map {|o| o[0..-2]}) + +opts.each do |opt, arg| + case opt + when '--help' then usage() + when '--debug' then options[:level] = Logger::DEBUG + when '--earl' + options[:quiet] = options[:earl] = true + options[:level] = Logger::FATAL + when '--info' then options[:level] = Logger::INFO + when '--live' then options[:live] = true + when '--output' then options[:output] = File.open(arg, "w") + when '--quiet' + options[:quiet] = true + options[:level] = Logger::FATAL + when '--skip-slow' then options[:slow] = false + when '--type' + unless %w(parser reasoner all).include?(arg) + STDERR.puts "unknown test type: #{options[:type]}" + help(**options) + end + options[:type] = arg.to_sym + when '--validate' then options[:validate] = true + when '--verbose' then options[:verbose] = true + end +end + +manifests = %w( + tests/N3Tests/manifest.ttl + tests/TurtleTests/manifest.ttl + tests/N3Tests/manifest-reasoner.ttl + tests/N3Tests/manifest-extended.ttl +).map {|r| "https://w3c.github.io/N3/#{r}"} + +earl_preamble(**options) if options[:earl] + +begin + manifests.each do |man| + Fixtures::SuiteTest::Manifest.open(man) do |m| + m.entries.each do |tc| + next unless ARGV.empty? || ARGV.any? 
{|n| tc.property('@id').match?(/#{n}/) || tc.property('action').match?(/#{n}/)} + run_tc(man, tc, **options.merge(list_terms: !man.include?("TurtleTests"))) + end + end + end +rescue Interrupt => e + STDERR.puts "(interrupt)" + STDERR.puts "Backtrace: " + e.backtrace.join("\n ") if $verbose + exit 1 +end + +options[:results].each {|k, v| puts "#{k}: #{v}"} diff --git a/spec/.gitignore b/spec/.gitignore index a4cc2ca..55f2aae 100644 --- a/spec/.gitignore +++ b/spec/.gitignore @@ -1 +1,2 @@ -/uri-cache/ +/w3c-n3 +/w3c-rdf diff --git a/spec/extensions_spec.rb b/spec/extensions_spec.rb new file mode 100644 index 0000000..50772c6 --- /dev/null +++ b/spec/extensions_spec.rb @@ -0,0 +1,109 @@ +# coding: utf-8 +require_relative 'spec_helper' + +describe RDF::List do + let(:constant) {RDF::List[RDF::URI("A"), RDF::URI("B")]} + let(:nodes) {RDF::List[RDF::Node.new("a"), RDF::Node.new("b")]} + let(:vars) {RDF::List[RDF::Query::Variable.new("a"), RDF::Query::Variable.new("b")]} + + describe "#variable?" do + context "constant" do + subject {constant} + specify {is_expected.not_to be_variable} + specify {is_expected.to be_constant} + end + + context "nodes" do + subject {nodes} + specify {is_expected.not_to be_variable} + specify {is_expected.to be_constant} + end + + context "vars" do + subject {vars} + specify {is_expected.to be_variable} + specify {is_expected.not_to be_constant} + end + end +end + +describe RDF::Value do + describe "#formula?" do + { + RDF::Node.new("a") => false, + RDF::Literal.new("a") => false, + RDF::URI("a") => false, + RDF::Graph.new => false, + RDF::List[RDF::URI("a")] => false, + RDF::Statement.new(RDF::URI("s"), RDF::URI("p"), RDF::URI("o")) => false, + RDF::N3::Algebra::Formula.new => true + }.each do |term, is_formula| + context term.class.to_s do + if is_formula + specify {expect(term).to be_formula} + else + specify {expect(term).not_to be_formula} + end + end + end + end +end + +describe RDF::Term do + describe "#sameTerm?" do + { + "lita lita": [RDF::Literal.new("a"), RDF::Literal.new("a"), true], + "lita litb": [RDF::Literal.new("a"), RDF::Literal.new("b"), false], + "lita nodea": [RDF::Literal.new("a"), RDF::Node.intern("a"), false], + "lita uria": [RDF::Literal.new("a"), RDF::URI("a"), false], + "lita vara": [RDF::Literal.new("a"), RDF::Query::Variable.new("a"), false], + + "nodea nodea": [RDF::Node.intern("a"), RDF::Node.intern("a"), true], + "nodea nodeb": [RDF::Node.intern("a"), RDF::Node.intern("b"), false], + + "uria uria": [RDF::URI("a"), RDF::URI("a"), true], + "uria urib": [RDF::URI("a"), RDF::URI("b"), false], + + "vara vara": [RDF::Query::Variable.new("a"), RDF::Query::Variable.new("a"), true], + "vara varb": [RDF::Query::Variable.new("a"), RDF::Query::Variable.new("b"), false], + }.each do |term, (a, b, tf)| + context term do + if tf + specify {expect(a).to be_sameTerm(b)} + else + specify {expect(a).not_to be_sameTerm(b)} + end + end + end + end +end + +describe RDF::Node do + describe "#evaluate" do + let(:node) {RDF::Node.intern("a")} + + it "returns itself if not bound" do + expect(node.evaluate({}, formulae: {})).to eq node + end + end +end + +describe RDF::Query::Pattern do + describe "#eql?" 
do + let(:stmt1) {RDF::Statement.new(RDF::N3::List[RDF::URI("a"), RDF::URI("b")], RDF::URI("p"), RDF::N3::List[RDF::URI("d"), RDF::URI("e")])} + let(:stmt2) {RDF::Statement.new(RDF::N3::List[RDF::URI("a"), RDF::URI("b")], RDF::URI("p"), RDF::URI("o"))} + let(:pat1) {RDF::Query::Pattern.new(RDF::N3::List[RDF::URI("a"), RDF::URI("b")], RDF::URI("p"), RDF::N3::List[RDF::URI("d"), RDF::URI("e")])} + + it "equals itself" do + expect(pat1).to eql pat1 + end + + it "equals matching statement" do + expect(pat1).to eql stmt1 + end + + it "does not equal non-matching statement" do + expect(pat1).not_to eql stmt2 + end + end +end diff --git a/spec/list_spec.rb b/spec/list_spec.rb new file mode 100644 index 0000000..280f9dd --- /dev/null +++ b/spec/list_spec.rb @@ -0,0 +1,643 @@ +# coding: utf-8 +require_relative 'spec_helper' + +describe RDF::N3::List do + + let(:empty) {RDF::List::NIL} + let(:abc) {described_class[RDF::Literal.new('a'), RDF::Literal.new('b'), RDF::Literal.new('c')]} + let(:nodes) {described_class[RDF::Node.new('a'), RDF::Node.new('b'), RDF::Node.new('c')]} + let(:ten) {described_class[*(1..10)]} + let(:pattern) {described_class[RDF::Query::Variable.new("a"), RDF::Query::Variable.new("b"), RDF::Literal("c")]} + + describe ".to_uri" do + specify {expect(described_class.to_uri).to eq RDF::N3::List::URI} + end + + describe "vocabulary accessors" do + specify {expect(described_class.append).to be_a(RDF::Vocabulary::Term)} + specify {expect(described_class.append).to eql RDF::N3::List::URI.+("append")} + end + + describe ".try_list" do + end + + describe "[]" do + context "without arguments" do + it "constructs a new empty list" do + expect(described_class[]).to be_an(described_class) + expect(described_class[]).to be_empty + expect(described_class[]).to eq RDF::List::NIL + end + end + + context "with arguments" do + it "constructs a new non-empty list" do + expect(described_class[1, 2, 3]).to be_an(described_class) + expect(described_class[1, 2, 3]).not_to be_empty + end + + it "accepts list arguments" do + expect { described_class[described_class[]] }.not_to raise_error + end + + it "accepts blank node arguments" do + expect { described_class[RDF::Node.new] }.not_to raise_error + end + + it "accepts URI arguments" do + expect { described_class[RDF.nil] }.not_to raise_error + end + + it "accepts nil arguments" do + expect { described_class[nil] }.not_to raise_error + end + + it "accepts literal arguments" do + expect { described_class[RDF::Literal.new("Hello, world!", language: :en)] }.not_to raise_error + end + + it "accepts boolean arguments" do + expect { described_class[true, false] }.not_to raise_error + end + + it "accepts string arguments" do + expect { described_class["foo", "bar"] }.not_to raise_error + end + + it "accepts integer arguments" do + expect { described_class[1, 2, 3] }.not_to raise_error + end + end + end + + describe "#initialize" do + context "with subject and graph" do + let(:graph) {RDF::Graph.new} + it "initializes pre-existing list" do + n = RDF::Node.new + graph.insert(RDF::Statement(n, RDF.first, "foo")) + graph.insert(RDF::Statement(n, RDF.rest, RDF.nil)) + described_class.new(subject: n, graph: graph).valid? 
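+        # The list is rebuilt from the rdf:first/rdf:rest statements already in the graph;
+        # for the graph above, for example,
+        #   described_class.new(subject: n, graph: graph).to_a
+        # would be expected to yield [RDF::Literal("foo")].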
+ expect(described_class.new(subject: n, graph: graph)).to be_valid + end + end + + context "without subject or graph" do + end + + context "with subject, graph and no values" do + end + + context "with subject and values" do + end + end + + describe "#[]" do + it "accepts one argument" do + expect { empty[0] }.not_to raise_error + end + + it "rejects fewer arguments" do + expect { empty.__send__(:[]) }.to raise_error(ArgumentError) + end + + it "returns a value for valid indexes" do + expect(ten[0]).to be_a_value + end + + it "returns nil for invalid indexes" do + expect(empty[0]).to be_nil + expect(ten[20]).to be_nil + end + + context "with start index and a length" do + it "accepts two arguments" do + expect { ten[0, 9] }.not_to raise_error + end + + it "returns a value" do + expect(ten[0, 9]).to be_a_value + end + end + + context "with a range" do + it "accepts one argument" do + expect { ten[0..9] }.not_to raise_error + end + end + end + + describe "#[]=" do + it "accepts one integer argument" do + expect { ten[0] = 0 }.not_to raise_error + end + + it "accepts two integer arguments" do + expect { ten[0, 0] = 0 }.not_to raise_error + end + + it "accepts a range argument" do + expect { ten[0..1] = 0 }.not_to raise_error + end + + it "rejects fewer arguments" do + expect { ten[] = 0 }.to raise_error(ArgumentError) + end + + it "rejects extra arguments" do + expect { ten[0, 1, 2] = 0 }.to raise_error(ArgumentError) + end + + context "with index" do + it "rejects string index" do + expect { ten["1"] = 0 }.to raise_error(ArgumentError) + end + + { + "a[4] = '4'" => { + initial: [], + index: 4, + value: "4", + result: [nil, nil, nil, nil, "4"] + }, + "a[-1] = 'Z'" => { + initial: ["A", "4"], + index: -1, + value: "Z", + result: ["A", "Z"] + }, + }.each do |name, props| + it name do + list = described_class[*props[:initial]] + list[props[:index]] = props[:value] + expect(list).to eq described_class[*props[:result]] + end + end + end + + context "with start and length" do + { + "a[0, 3] = [ 'a', 'b', 'c' ]" => { + initial: [nil, nil, nil, nil, "4"], + start: 0, + length: 3, + value: [ 'a', 'b', 'c' ], + result: ["a", "b", "c", nil, "4"] + }, + "a[0, 2] = '?'" => { + initial: ["a", 1, 2, nil, "4"], + start: 0, + length: 2, + value: "?", + result: ["?", 2, nil, "4"] + }, + "a[0, 0] = [ 1, 2 ]" => { + initial: ["A"], + start: 0, + length: 0, + value: [ 1, 2 ], + result: [1, 2, "A"] + }, + "a[3, 0] = 'B'" => { + initial: [1, 2, "A"], + start: 3, + length: 0, + value: "B", + result: [1, 2, "A", "B"] + }, + "lorem[0, 5] = []" => { + initial: ['lorem' 'ipsum' 'dolor' 'sit' 'amet'], + start: 0, + length: 5, + value: [], + result: [] + }, + }.each do |name, props| + it name do + list = described_class[*props[:initial]] + list[props[:start], props[:length]] = props[:value] + expect(list).to eq described_class[*props[:result]] + end + end + + it "sets subject to rdf:nil when list is emptied" do + list = described_class[%(lorem ipsum dolor sit amet)] + list[0,5] = [] + expect(list).to eq described_class[] + expect(list.subject).to eq RDF.nil + end + end + + context "with range" do + { + "a[1..2] = [ 1, 2 ]" => { + initial: ["a", "b", "c", nil, "4"], + range: (1..2), + value: [ 1, 2 ], + result: ["a", 1, 2, nil, "4"] + }, + "a[0..2] = 'A'" => { + initial: ["?", 2, nil, "4"], + range: (0..2), + value: "A", + result: ["A", "4"] + }, + "a[1..-1] = nil" => { + initial: ["A", "Z"], + range: (1..-1), + value: nil, + result: ["A", nil] + }, + "a[1..-1] = []" => { + initial: ["A", nil], + range: (1..-1), + value: 
[], + result: ["A"] + }, + }.each do |name, props| + it name do + list = described_class[*props[:initial]] + list[props[:range]] = props[:value] + expect(list).to eq described_class[*props[:result]] + end + end + end + end + + describe "#<<" do + it "accepts one argument" do + expect { ten << 11 }.not_to raise_error + end + + it "rejects fewer arguments" do + expect { ten.__send__(:<<) }.to raise_error(ArgumentError) + end + + it "appends the new value at the tail of the list" do + ten << 11 + expect(ten.last).to eq RDF::Literal.new(11) + end + + it "increments the length of the list by one" do + ten << 11 + expect(ten.length).to eq 11 + end + + it "returns self" do + expect(ten << 11).to equal(ten) + end + end + + describe "#shift" do + it "returns the first element from the list" do + expect(ten.shift).to eq RDF::Literal.new(1) + end + + it "removes the first element from the list" do + ten.shift + expect(ten).to eq described_class[2, 3, 4, 5, 6, 7, 8, 9, 10] + end + + it "should return nil from an empty list" do + expect(empty.shift).to be_nil + end + end + + describe "#unshift" do + it "adds element to beginning of list" do + ten.unshift(0) + expect(ten).to eq described_class[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + end + + it "should return the new list" do + expect(ten.unshift(0)).to eq described_class[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + end + end + + describe "#clear" do + it "empties list" do + expect(ten.clear).to eq described_class[] + end + end + + describe "#eql?" do + it "requires an argument" do + expect { empty.send(:eql?) }.to raise_error(ArgumentError) + end + + it "returns true when given the same list" do + expect(ten).to eql ten + end + + it "returns true when comparing a list to its contents" do + expect(ten).to eql ten.to_a + end + + it "does not equal different list" do + expect(abc).not_to eql ten + end + + it "pattern matches list" do + expect(pattern).to eql abc + end + + it "does not match of different size" do + expect(pattern).not_to eql ten + end + end + + describe "#empty?" do + it "requires no arguments" do + expect { empty.empty? 
}.not_to raise_error + end + + it "returns a boolean" do + expect(empty).to be_empty + expect(abc).not_to be_empty + expect(ten).not_to be_empty + end + end + + describe "#length" do + it "requires no arguments" do + expect { empty.length }.not_to raise_error + end + + it "returns an integer" do + expect(empty.length).to be_an(Integer) + end + + it "returns the length of the list" do + expect(empty.length).to eq 0 + expect(abc.length).to eq 3 + expect(ten.length).to eq 10 + end + end + + describe "#size" do + it "aliases #length" do + expect(empty.size).to eq empty.length + expect(ten.size).to eq ten.length + end + end + + describe "#index" do + it "accepts one argument" do + expect { ten.index(nil) }.not_to raise_error + end + end + + describe "#fetch" do + it "requires one argument" do + expect { ten.fetch }.to raise_error(ArgumentError) + expect { ten.fetch(0) }.not_to raise_error + end + + it "returns a value" do + expect(ten.fetch(0)).to be_a_value + end + + it "returns the value at the given index" do + expect(ten.fetch(0)).to eq RDF::Literal.new(1) + expect(ten.fetch(9)).to eq RDF::Literal.new(10) + end + + it "raises IndexError for invalid indexes" do + expect { ten.fetch(20) }.to raise_error(IndexError) + end + + describe "with a default value" do + it "accepts two arguments" do + expect { ten.fetch(0, nil) }.not_to raise_error + end + + it "returns the second argument for invalid indexes" do + expect { ten.fetch(20, nil) }.not_to raise_error + expect(ten.fetch(20, true)).to eq true + end + end + + describe "with a block" do + it "yields to the given block for invalid indexes" do + expect { ten.fetch(20) { |index| } }.not_to raise_error + expect(ten.fetch(20) { |index| true }).to be_truthy + end + end + end + + describe "#at" do + it "accepts one argument" do + expect { ten.at(0) }.not_to raise_error + end + end + + describe "#last" do + it "requires no arguments" do + expect { ten.last }.not_to raise_error + end + end + + describe "#rest" do + it "requires no arguments" do + expect { ten.rest }.not_to raise_error + end + end + + describe "#tail" do + it "requires no arguments" do + expect { ten.tail }.not_to raise_error + end + end + + describe "#each_subject" do + describe "without a block" do + it "requires no arguments" do + expect { ten.each_subject }.not_to raise_error + end + + it "returns an enumerator" do + expect(abc.each_subject).to be_an_enumerator + end + end + + describe "with a block" do + it "requires no arguments" do + expect { ten.each_subject { |subject| } }.not_to raise_error + end + + it "yields all subject terms in the list" do + expect {|b| ten.each_subject(&b)}.to yield_control.exactly(10).times + end + end + end + + describe "#each" do + describe "without a block" do + it "requires no arguments" do + expect { ten.each }.not_to raise_error + end + + it "returns an enumerator" do + expect(abc.each_subject).to be_an_enumerator + end + end + + describe "with a block" do + it "requires no arguments" do + expect { ten.each { |value| } }.not_to raise_error + end + + it "yields the correct number of times" do + expect(abc.each.count).to eq 3 + expect(ten.each.count).to eq 10 + end + end + end + + describe "#each_statement" do + describe "without a block" do + it "requires no arguments" do + expect { ten.each_statement }.not_to raise_error + end + + it "returns an enumerator" do + expect(abc.each_subject).to be_an_enumerator + end + end + + describe "with a block" do + it "requires no arguments" do + expect { ten.each_statement { |statement| } }.not_to raise_error 
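+        # Each list cell contributes an rdf:first and an rdf:rest statement, so a list of
+        # n elements yields 2 * n statements (as the counts in the next example confirm).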
+ end + + it "yields the correct number of times" do + expect(abc.each_statement.count).to eq 3 * 2 + expect(ten.each_statement.count).to eq 10 * 2 + end + + it "yields statements" do + expect {|b| ten.each_statement(&b)}.to yield_control.at_least(10).times + ten.each_statement do |statement| + expect(statement).to be_a_statement + end + end + end + + describe "with embedded statement" do + subject {RDF::N3::List['a', RDF::N3::List['b'], 'c']} + + it "yields the correct number of times" do + expect(subject.each_statement.count).to eq 8 + end + + it "does not include statements with embedded lists" do + statements = subject.each_statement.to_a + entries = statements.select {|st| st.predicate == RDF.first}.map(&:object) + entries.each do |e| + expect(e).not_to be_list + end + end + end + end + + describe "#has_nodes?" do + it "finds list with nodes" do + expect(nodes).to have_nodes + end + + it "rejects list with nodes" do + expect(abc).not_to have_nodes + end + end + + describe "#to_ndvar" do + it "creates existential vars for list having nodes" do + expect(nodes.to_ndvar(RDF::Node.new)).to all(be_variable) + end + end + + describe "#variable?" do + it "rejects list with nodes" do + expect(nodes).not_to be_variable + end + + it "rejects list with URIs" do + expect(abc).not_to be_variable + end + + it "finds list with existentials" do + expect(nodes.to_ndvar(RDF::Node.new)).to be_variable + end + end + + describe "#variables" do + it "finds no variables in constant list" do + expect(abc.variables).to be_empty + end + + it "finds no variables in node list" do + expect(nodes.variables).to be_empty + end + + it "finds variables in existential list" do + expect(nodes.to_ndvar(RDF::Node.new).variables).to all(be_variable) + end + end + + describe "#var_values" do + it "returns an empty array with constant pattern" do + pattern = described_class.new(values: %w(a b c)) + list = described_class.new(values: %w(a b c)) + expect(pattern.var_values(:x, list)).to be_empty + end + + it "returns an empty array with no matching variable" do + pattern = described_class.new(values: [RDF::Query::Variable.new(:a), RDF::Query::Variable.new(:b), RDF::Query::Variable.new(:c)]) + list = described_class.new(values: %w(a b c)) + expect(pattern.var_values(:x, list)).to be_empty + end + + it "returns matching value" do + pattern = described_class.new(values: [RDF::Query::Variable.new(:a), RDF::Query::Variable.new(:b), RDF::Query::Variable.new(:c)]) + list = described_class.new(values: %w(a b c)) + expect(pattern.var_values(:a, list)).to include(RDF::Literal('a')) + end + + it "returns matching values when multiple" do + pattern = described_class.new(values: [RDF::Query::Variable.new(:a), RDF::Query::Variable.new(:a), RDF::Query::Variable.new(:a)]) + list = described_class.new(values: %w(a b c)) + expect(pattern.var_values(:a, list)).to include(RDF::Literal('a'), RDF::Literal('b'), RDF::Literal('c')) + end + + it "returns matching values recursively" do + pattern = described_class.new(values: [ + RDF::Query::Variable.new(:a), + described_class.new(values: [RDF::Query::Variable.new(:a)]), + RDF::Query::Variable.new(:a)]) + list = described_class.new(values: ["a", described_class.new(values: ["b"]), "c"]) + expect(pattern.var_values(:a, list)).to include(RDF::Literal('a'), RDF::Literal('b'), RDF::Literal('c')) + end + end + + describe "#evaluate" do + let(:constant) {RDF::N3::List[RDF::URI("A"), RDF::URI("B")]} + let(:nodes) {described_class[RDF::Node.new('a'), RDF::Node.new('b')]} + let(:vars) 
{RDF::N3::List[RDF::Query::Variable.new("a"), RDF::Query::Variable.new("b")]} + let(:bindings) {RDF::Query::Solution.new(a: RDF::URI("A"), b: RDF::URI("B"))} + + it "returns itself if not variable" do + expect(constant.evaluate(bindings)).to eq constant + end + + it "returns bound list if variable" do + expect(vars.evaluate(bindings)).to eq constant + end + end + + describe "#solution" do + subject {pattern.solution(abc)} + + specify("pattern[:a] #=> list[0]") { expect(subject[:a]).to eq abc[0]} + specify("pattern[:b] #=> list[1]") { expect(subject[:b]).to eq abc[1]} + end +end diff --git a/spec/reader_spec.rb b/spec/reader_spec.rb index e7c2a38..cdecab6 100644 --- a/spec/reader_spec.rb +++ b/spec/reader_spec.rb @@ -9,8 +9,14 @@ let!(:doap_count) {File.open(doap_nt).each_line.to_a.length} let(:logger) {RDF::Spec.logger} + after(:each) do |example| + puts logger.to_s if + example.exception && + !example.exception.is_a?(RSpec::Expectations::ExpectationNotMetError) + end + it_behaves_like 'an RDF::Reader' do - let(:reader) {RDF::N3::Reader.new(reader_input)} + let(:reader) {RDF::N3::Reader.new(reader_input, logger: logger)} let(:reader_input) {File.read(doap)} let(:reader_count) {doap_count} end @@ -82,7 +88,7 @@ it "should yield statements" do inner = double("inner") expect(inner).to receive(:called).with(RDF::Statement).exactly(15) - RDF::N3::Reader.new(@sampledoc).each_statement do |statement| + RDF::N3::Reader.new(@sampledoc, logger: logger).each_statement do |statement| inner.called(statement.class) end end @@ -169,7 +175,13 @@ end it "should parse long literal with escape" do - n3 = %(@prefix : . :a :b "\\U00015678another" .) + n3 = %(@prefix : . :a :b """\\U00015678another""" .) + statement = parse(n3).statements.first + expect(statement.object.value).to eq "\u{15678}another" + end + + it "should parse long literal single quote with escape" do + n3 = %(@prefix : . :a :b '''\\U00015678another''' .) statement = parse(n3).statements.first expect(statement.object.value).to eq "\u{15678}another" end @@ -191,11 +203,11 @@ baz more ), - "trailing escaped double-quote" => %q( "), + "trailing escaped double-quote" => %q( " ), "regression.n3" => %q(sameDan.n3 sameThing.n3 --think --apply=forgetDups.n3 --purge --n3="/" ) }.each do |test, string| it "parses #{test}" do - graph = parse(%(:a :b """#{string}""")) + graph = parse(%(:a :b """#{string}""" .)) expect(graph.size).to eq 1 expect(graph.statements.first.object.value).to eq string end @@ -208,7 +220,7 @@ expect(graph.size).to eq 1 statement = graph.statements.first expect(statement.subject).to be_a(RDF::Node) - expect(statement.subject.id).to match(/anon/) + expect(statement.subject.id).to match(/^b\d+/) expect(statement.predicate.to_s).to eq "http://example.org/property" expect(statement.object.to_s).to eq "http://example.org/resource2" end @@ -219,7 +231,7 @@ statement = graph.statements.first expect(statement.subject.to_s).to eq "http://example.org/resource2" expect(statement.predicate).to be_a(RDF::Node) - expect(statement.predicate.id).to match(/anon/) + expect(statement.predicate.id).to match(/^b\d+/) expect(statement.object.to_s).to eq "http://example.org/object" end @@ -230,7 +242,7 @@ expect(statement.subject.to_s).to eq "http://example.org/resource2" expect(statement.predicate.to_s).to eq "http://example.org/property" expect(statement.object).to be_a(RDF::Node) - expect(statement.object.id).to match(/anon/) + expect(statement.object.id).to match(/^b\d+/) end { @@ -312,7 +324,7 @@ { %(<#Dürst> a "URI straight in UTF8".) 
=> %( "URI straight in UTF8" .), - #%(:a :related :ひらがな .) => %( .), + %(:a :related :ひらがな .) => %( .), }.each_pair do |n3, nt| it "for '#{n3}'" do expect(parse(n3, base_uri: "http://a/b", logger: false)).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) @@ -336,7 +348,7 @@ describe "with n3 grammar" do describe "syntactic expressions" do - it "should create typed literals with qname" do + it "should create typed literals with pname" do n3doc = %( @prefix rdf: . @prefix foaf: . @@ -369,20 +381,26 @@ expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) end - it "should generate rdf:type for '@a'" do + it "should generate rdf:type for '@a'", pending: 'deprecated' do n3 = %(@prefix a: . a:b @a .) nt = %( .) - expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) + expect(parse(n3, base_uri: "http://a/b", validate: tru)).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) end it "should generate inverse predicate for 'is xxx of'" do - n3 = %("value" is :prop of :b . :b :prop "value" .) + n3 = %("value" is :prop of :b .) nt = %( "value" .) expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) end - it "should generate inverse predicate for '@is xxx @of'" do - n3 = %("value" @is :prop @of :b . :b :prop "value" .) + it "should generate inverse predicate for '@is xxx @of'", pending: 'deprecated' do + n3 = %("value" @is :prop @of :b .) + nt = %( "value" .) + expect(parse(n3, base_uri: "http://a/b", validate: true)).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) + end + + it "should generate inverse predicate for '<- xxx'" do + n3 = %("value" <- :prop :b .) nt = %( "value" .) expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) end @@ -397,7 +415,7 @@ end it "should generate inverse predicate for 'is xxx of' with blankNodePropertyList" do - n3 = %([ is :prop of :George]) + n3 = %([ is :prop of :George] .) nt = %( _:bn . ) @@ -418,10 +436,10 @@ expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) end - it "should generate predicate for '@has xxx'" do + it "should generate predicate for '@has xxx'", pending: 'deprecated' do n3 = %(@prefix a: . a:b @has :pred a:c .) nt = %( .) 
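+      # 'has' is a noise word in N3: 'a:b has :pred a:c .' reads the same as 'a:b :pred a:c .',
+      # which is why only the plain triple is expected; the '@has' form is deprecated, hence pending.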
- expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) + expect(parse(n3, base_uri: "http://a/b", validate: tru)).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) end it "should create log:implies predicate for '=>'" do @@ -443,8 +461,6 @@ end { - %(:a :b @true) => %( "true"^^ .), - %(:a :b @false) => %( "false"^^ .), %(:a :b 1) => %( "1"^^ .), %(:a :b -1) => %( "-1"^^ .), %(:a :b +1) => %( "+1"^^ .), @@ -456,7 +472,7 @@ }.each_pair do |n3, nt| it "should create typed literal for '#{n3}'" do expected = RDF::NTriples::Reader.new(nt) - expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(expected, about: "http://a/b", logger: logger, format: :n3) + expect(parse("#{n3} .", base_uri: "http://a/b")).to be_equivalent_graph(expected, about: "http://a/b", logger: logger, format: :n3) end end @@ -495,6 +511,17 @@ expect(statement.predicate).not_to equal statement.object end + it "creates variable for quickVar" do + n3 = %(?x :y :z .) + g = parse(n3, base_uri: "http://a/b") + statement = g.statements.first + expect(statement.subject).to be_variable + expect(statement.predicate).not_to be_variable + expect(statement.object).not_to be_variable + expect(statement.subject).not_to equal statement.predicate + expect(statement.subject).not_to equal statement.object + end + it "substitutes node for URI with @forSome" do n3 = %(@forSome :x . :x :y :z .) g = parse(n3, base_uri: "http://a/b") @@ -526,6 +553,14 @@ expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) end + it "should not append # for http://foo/bar (sparqlPrefix)" do + n3 = %(PrEfIx : :a : :b .) + nt = %( + . + ) + expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) + end + it "should not append # for http://foo/bar/" do n3 = %(@prefix : . :a : :b .) nt = %( @@ -551,11 +586,20 @@ expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) end + it "should set absolute base (sparqlBase)" do + n3 = %(BaSe <> :a . <#c> :d .) + nt = %( + . + . + ) + expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) + end + it "should set absolute base (trailing /)" do n3 = %(@base . <> :a . <#c> :d .) nt = %( - . - . + . + . ) expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) end @@ -579,21 +623,20 @@ <> :a , <#e>. ) nt = %( - . - . - . - . - . - . + . + . + . + . + . + . ) expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) end it "returns defined prefixes" do n3 = %( - @prefix rdf: . + @base . @prefix rdfs: . - @prefix : . :foo a rdfs:Class. :bar :d :c. :a :d :c. @@ -601,87 +644,13 @@ reader = RDF::N3::Reader.new(n3, validate: true) reader.each {|statement|} expect(reader.prefixes).to eq({ - rdf: "http://www.w3.org/1999/02/22-rdf-syntax-ns#", + nil => "http://test/#", rdfs: "http://www.w3.org/2000/01/rdf-schema#", - nil => "http://test/" }) end end - describe "keywords" do - [ - %(base <>.), - %(keywords a.), - %(:a is :b of :c.), - %(:a @is :b of :c.), - %(:a is :b @of :c.), - %(:a has :b :c.), - ].each do |n3| - it "should require @ if keywords set to empty for '#{n3}'" do - expect do - parse("@keywords . 
#{n3}", base_uri: "http://a/b") - end.to raise_error(RDF::ReaderError) - end - end - - [ - %(prefix :<>.), - ].each do |n3| - it "parses as local name if keywords set to empty for '#{n3}'" do - expect do - parse("@keywords . #{n3}", base_uri: "http://a/b") - end.not_to raise_error - end - end - { - %(:a a :b) => %( .), - %(:a :b true) => %( .), - %(:a :b false) => %( .), - %(c :a :t) => %( .), - %(:c a :t) => %( .), - %(:c :a t) => %( .), - }.each_pair do |n3, nt| - it "should use default_ns for '#{n3}'" do - expect(parse("@keywords . #{n3}", base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) - end - end - - { - %(@keywords true. :a :b true.) => %( "true"^^ .), - %(@keywords false. :a :b false.) => %( "false"^^ .), - %(@keywords a. :a a :b.) => %( .), - %(@keywords is. :a is :b @of :c.) => %( .), - %(@keywords of. :a @is :b of :c.) => %( .), - %(@keywords has. :a has :b :c.) => %( .), - } .each_pair do |n3, nt| - it "should use keyword for '#{n3}'" do - expected = RDF::NTriples::Reader.new(nt) - expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(expected, about: "http://a/b", logger: logger, format: :n3) - end - end - - it "should raise error if unknown keyword set" do - n3 = %(@keywords foo.) - expect do - parse(n3, base_uri: "http://a/b", validate: true) - end.to raise_error(RDF::ReaderError, /Undefined keywords used: foo/) - end - end - describe "declaration ordering" do - it "should process _ namespace binding after an initial use as a BNode" do - n3 = %( - _:a a :p. - @prefix _: . - _:a a :p. - ) - nt = %( - . - _:a . - ) - expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) - end - it "should allow a prefix to be redefined" do n3 = %( @prefix a: . @@ -705,7 +674,7 @@ ) nt = %( . - . + . . ) expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) @@ -752,143 +721,302 @@ expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) end - it "should create BNode as a single object" do - n3 = %(@prefix a: . a:b a:oneRef [ a:pp "1" ; a:qq "2" ] .) - nt = %( - _:bnode0 "1" . - _:bnode0 "2" . - _:bnode0 . - ) - expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) - end - - it "should create a shared BNode" do - n3 = %( - @prefix a: . - - a:b1 a:twoRef _:a . - a:b2 a:twoRef _:a . - - _:a :pred [ a:pp "1" ; a:qq "2" ]. - ) - nt = %( - _:a . - _:a . - _:bnode0 "1" . - _:bnode0 "2" . - _:a :pred _:bnode0 . - ) - expected = RDF::Graph.new {|g| g << RDF::N3::Reader.new(nt, base_uri: "http://a/b")} - expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(expected, about: "http://a/b", logger: logger, format: :n3) - end + context "blankNodePropertyList" do + it "should create BNode as a single object" do + n3 = %(@prefix a: . a:b a:oneRef [ a:pp "1" ; a:qq "2" ] .) + nt = %( + _:bnode0 "1" . + _:bnode0 "2" . + _:bnode0 . + ) + expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) + end - it "should create nested BNodes" do - n3 = %( - @prefix a: . + it "should create a shared BNode" do + n3 = %( + @prefix a: . - a:a a:p [ a:p2 [ a:p3 "v1" , "v2" ; a:p4 "v3" ] ; a:p5 "v4" ] . - ) - nt = %( - _:bnode0 "v1" . - _:bnode0 "v2" . - _:bnode0 "v3" . - _:bnode1 _:bnode0 . - _:bnode1 "v4" . - _:bnode1 . 
- ) - expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) - end + a:b1 a:twoRef _:a . + a:b2 a:twoRef _:a . - describe "from paths" do - it "should create bnode for path x!p" do - n3 = %(:x2!:y2 :p2 "3" .) - nt = %(:x2 :y2 _:bnode0 ._:bnode0 :p2 "3" .) + _:a :pred [ a:pp "1" ; a:qq "2" ]. + ) + nt = %( + _:a . + _:a . + _:bnode0 "1" . + _:bnode0 "2" . + _:a :pred _:bnode0 . + ) expected = RDF::Graph.new {|g| g << RDF::N3::Reader.new(nt, base_uri: "http://a/b")} expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(expected, about: "http://a/b", logger: logger, format: :n3) end - it "should create bnode for path x^p" do - n3 = %(:x2^:y2 :p2 "3" .) - nt = %(_:bnode0 :y2 :x2 . _:bnode0 :p2 "3" .) - expected = RDF::Graph.new {|g| g << RDF::N3::Reader.new(nt, base_uri: "http://a/b")} - expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(expected, about: "http://a/b", logger: logger, format: :n3) + it "should create nested BNodes" do + n3 = %( + @prefix a: . + + a:a a:p [ a:p2 [ a:p3 "v1" , "v2" ; a:p4 "v3" ] ; a:p5 "v4" ] . + ) + nt = %( + _:bnode0 "v1" . + _:bnode0 "v2" . + _:bnode0 "v3" . + _:bnode1 _:bnode0 . + _:bnode1 "v4" . + _:bnode1 . + ) + expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) end + end - it "should decode :joe!fam:mother!loc:office!loc:zip as Joe's mother's office's zipcode" do + describe "property lists" do + it "should parse property list" do n3 = %( - @prefix fam: . - @prefix loc: . + @prefix a: . - :joe!fam:mother!loc:office!loc:zip . + a:b a:p1 "123" ; a:p1 "456" . + a:b a:p2 a:v1 ; a:p3 a:v2 . ) nt = %( - :joe _:bnode0 . - _:bnode0 _:bnode1 . - _:bnode1 _:bnode2 . + "123" . + "456" . + . + . ) - expected = RDF::Graph.new {|g| g << RDF::N3::Reader.new(nt, base_uri: "http://a/b")} - expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(expected, about: "http://a/b", logger: logger, format: :n3) + expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) end + end - it "should decode :joe!fam:mother^fam:mother Anyone whose mother is Joe's mother." do - n3 = %( - @prefix fam: . - @prefix loc: . + describe "collections" do + it "should parse empty list" do + n3 = %(@prefix :. :empty :set ().) + nt = %( + .) + expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) + end - :joe!fam:mother^fam:mother . + it "should parse list with single element" do + n3 = %(@prefix :. :gregg :edited ("JSON-LD").) + nt = %( + _:bnode0 "JSON-LD" . + _:bnode0 . + _:bnode0 . ) + expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) + end + + it "should parse list with multiple elements" do + n3 = %(@prefix :. :gregg :name ("Gregg" "Barnum" "Kellogg").) nt = %( - :joe _:bnode0 . - _:bnode1 _:bnode0 . + _:bnode0 "Gregg" . + _:bnode0 _:bnode1 . + _:bnode1 "Barnum" . + _:bnode1 _:bnode2 . + _:bnode2 "Kellogg" . + _:bnode2 . + _:bnode0 . ) - expected = RDF::Graph.new {|g| g << RDF::N3::Reader.new(nt, base_uri: "http://a/b")} - expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(expected, about: "http://a/b", logger: logger, format: :n3) + expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) end - it "should decode path with property list." 
do + it "should parse unattached lists" do n3 = %( - @prefix a: . - :a2!a:b2!a:c2 :q1 "3" ; :q2 "4" , "5" . + @prefix a: . + + ("1" "2" "3") . + # This is not a statement. + () . ) nt = %( - :a2 _:bnode0 . - _:bnode0 _:bnode1 . - _:bnode1 :q1 "3" . - _:bnode1 :q2 "4" . - _:bnode1 :q2 "5" . + _:bnode0 "1" . + _:bnode0 _:bnode1 . + _:bnode1 "2" . + _:bnode1 _:bnode2 . + _:bnode2 "3" . + _:bnode2 . ) - expected = RDF::Graph.new {|g| g << RDF::N3::Reader.new(nt, base_uri: "http://a/b")} - expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(expected, about: "http://a/b", logger: logger, format: :n3) + expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) end - it "should decode path as object(1)" do - n3 = %(:a :b "lit"^:c.) - nt = %( - :a :b _:bnode . - _:bnode :c "lit" . + it "should add property to empty list" do + n3 = %(@prefix a: . () a:prop "nilProp" .) + nt = %( "nilProp" .) + expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) + end + + it "should parse with compound items" do + n3 = %( + @prefix a: . + a:a a:p ( + [ a:p2 "v1" ] + + + ("inner list") + ) . + a:p "value" . ) - expected = RDF::Graph.new {|g| g << RDF::N3::Reader.new(nt, base_uri: "http://a/b")} - expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(expected, about: "http://a/b", logger: logger, format: :n3) + g = parse(n3, base_uri: "http://a/b") + expect(g.subjects.to_a.length).to eq 8 + n = g.first_object(subject: RDF::URI.new("http://foo/a#a"), predicate: RDF::URI.new("http://foo/a#p")) + expect(n).to be_node + seq = RDF::List.new(subject: n, graph: g) + expect(seq.to_a.length).to eq 4 + expect(seq.first).to be_node + expect(seq.second).to eq RDF::URI.new("http://resource1") + expect(seq.third).to eq RDF::URI.new("http://resource2") + expect(seq.fourth).to be_node end - it "should decode path as object(2)" do - n3 = %(@prefix a: . :r :p :o!a:p1!a:p2 .) - nt = %( - :o _:bnode0 . - _:bnode0 _:bnode1 . - :r :p _:bnode1 . + it "should use exactly the same object when referencing a list" do + n3 = %(:thing :prop ( 4 ) .) + g = parse(n3) + n1 = g.first_object(predicate: RDF::URI("#prop")) + n2 = g.first_subject(predicate: RDF.first) + expect(n1.object_id).to eq n2.object_id + end + + context "as terms" do + it "should parse list with single element" do + n3 = %(:gregg :edited ("JSON-LD").) + nt = %( + _:bnode0 "JSON-LD" . + _:bnode0 . + _:bnode0 . + ) + g = parse(n3, list_terms: true) + expect(g.count).to eql 1 + + statement = g.statements.first + expect(statement.object).to be_list + + list = statement.object + expect(list.subject).to be_node + expect(list.length).to eql 1 + expect(list.first).to eql RDF::Literal("JSON-LD") + end + end + + it "should parse list with multiple elements" do + n3 = %(:gregg :name ("Gregg" "Barnum" "Kellogg").) + g = parse(n3, list_terms: true) + expect(g.count).to eql 1 + + statement = g.statements.first + expect(statement.object).to be_list + + list = statement.object + expect(list.subject).to be_node + expect(list.length).to eql 3 + expect(list.to_a).to include(RDF::Literal("Gregg"), RDF::Literal("Barnum"), RDF::Literal("Kellogg")) + end + + it "should add property to empty list" do + n3 = %(@prefix a: . () a:prop "nilProp" .) 
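+        # With list_terms: true the reader folds rdf:first/rdf:rest chains into RDF::N3::List
+        # terms, so the empty collection () surfaces directly as a list-valued (empty) subject
+        # instead of expanding into first/rest triples.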
+ g = parse(n3, list_terms: true) + expect(g.count).to eql 1 + + statement = g.statements.first + expect(statement.subject).to be_list + expect(statement.subject).to be_empty + end + + it "should parse with compound items" do + n3 = %( + @prefix a: . + a:a a:p ( + [ a:p2 "v1" ] + + + ("inner list") + ) . + a:p "value" . ) - expected = RDF::Graph.new {|g| g << RDF::N3::Reader.new(nt, base_uri: "http://a/b")} - expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(expected, about: "http://a/b", logger: logger, format: :n3) + g = parse(n3, list_terms: true) + expect(g.subjects.to_a.length).to eq 3 + list = g.first_object(subject: RDF::URI.new("http://foo/a#a"), predicate: RDF::URI.new("http://foo/a#p")) + expect(list).to be_list + expect(list.count).to eq 4 + l1, l2, l3, l4 = list.to_a + expect(l1).to be_node + expect(l2).to eq RDF::URI.new("http://resource1") + expect(l3).to eq RDF::URI.new("http://resource2") + expect(l4).to be_list + + n = g.first_object(subject: l1, predicate: RDF::URI.new("http://foo/a#p2")) + expect(n).to eq RDF::Literal("v1") + + expect(l4.count).to be 1 + expect(l4.first).to eq RDF::Literal("inner list") + end + end + + context "property paths" do + { + "subject x!p": [ + %(:x2!:y2 :p2 "3" .), + %(:x2 :y2 _:bnode0 . _:bnode0 :p2 "3" .) + ], + "subject x^p": [ + %(:x2^:y2 :p2 "3" .), + %(_:bnode0 :y2 :x2 . _:bnode0 :p2 "3" .) + ], + "alberts mother inverse of metor to auntieAnne": [ + %(:albert!:mother :mentor!:inverse :auntieAnne .), + %( + :albert :mother _:bnode0 . + _:bnode0 _:pred0 :auntieAnne . + :mentor :inverse _:pred0 . + ) + ], + "albert doesnt admire grumpy": [ + %(:albert :admires!:converse :grumpy .), + %(:albert _:pred0 :grumpy . :admires :converse _:pred0 .) + ], + "1+2=3": [ + %{("1" "2")!:sum a :THREE.}, + %{("1" "2") :sum _:bnode0 . _:bnode0 a :THREE .} + ], + "relatedTo": [ + %{(:a!:b :c^:d) :relatedTo (:e!:f!:g ) .}, + %{ + :a :b _:bnode0 . + _:bnode1 :d :c . + :e :f _:bnode2 . + _:bnode2 :g _:bnode3 . + (_:bnode0 _:bnode1) :relatedTo (_:bnode3) . + } + ], + "joes mothers offices zip": [ + %{:joe!:mother!:office!:zip .}, + %{:joe :mother [:office [:zip []]] .} + ], + "Anyone whose mother is Joe's mother": [ + %{:joe!:mother^:mother .}, + %{:joe :mother _:bnode0 . [:mother _:bnode0] .} + ], + "path as object(1)": [ + %{:a :b "lit"^:c.}, + %{:a :b [:c "lit"] .} + ], + "path as object(2)": [ + %(:r :p :o!:p1!:p2 .), + %{:o :p1 [:p2 _:bnode1] . :r :p _:bnode1 .} + ] + }.each do |title, (n3, res)| + it title do + expected = RDF::Graph.new {|g| g << RDF::N3::Reader.new(res, base_uri: "http://a/b")} + expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(expected, logger: logger, format: :n3) + end end end end describe "formulae" do - before(:each) { @repo = RDF::Repository.new } + before(:each) { @repo = RDF::N3:: Repository.new } it "creates an RDF::Node instance for formula" do - n3 = %(:a :b {}) + n3 = %(:a :b {} .) nq = %(:a :b _:c .) result = parse(n3, repo: @repo, base_uri: "http://a/b") expected = parse(nq, repo: @repo, base_uri: "http://a/b") @@ -896,10 +1024,10 @@ end it "adds statements with graph_name" do - n3 = %(:a :b {[:c :d]}) + n3 = %(:a :b {[:c :d]} .) trig = %(<#a> <#b> _:c . 
_:c {[<#c> <#d>] .})
      result = parse(n3, repo: @repo, base_uri: "http://a/b")
-      expected = RDF::Repository.new {|r| r << RDF::TriG::Reader.new(trig, base_uri: "http://a/b")}
+      expected = RDF::N3::Repository.new {|r| r << RDF::TriG::Reader.new(trig, base_uri: "http://a/b")}
      expect(result).to be_equivalent_graph(expected, logger: logger, format: :n3)
    end
@@ -920,17 +1048,59 @@
      }
      )
      result = parse(n3, repo: @repo, base_uri: "http://a/b")
-      expected = RDF::Repository.new {|r| r << RDF::TriG::Reader.new(trig, base_uri: "http://a/b")}
+      expected = RDF::N3::Repository.new {|r| r << RDF::TriG::Reader.new(trig, base_uri: "http://a/b")}
      expect(result).to be_equivalent_graph(expected, logger: logger, format: :n3)
    end

-    it "creates unique bnodes within different formula" do
+    it "creates unique bnodes within different formula (bnodes)" do
      n3 = %(
      _:a a :Thing .
-      {_:a a :Thing} => {_:a a :Thing}
+      {_:a a :Thing} => {_:a a :Thing} .
      )
      result = parse(n3, repo: @repo, base_uri: "http://a/b")
-      expect(result.statements.uniq.length).to eq 4
+      statements = result.statements
+      logger.debug('sxp') {SXP::Generator.string statements.to_sxp_bin}
+      expect(statements.length).to produce(4, logger)
+      # All three bnodes should be distinct
+      nodes_of_a = statements.map(&:to_a).flatten.select {|r| r.node? && !r.to_s.include?('_form')}
+      expect(nodes_of_a.uniq.count).to produce(3, logger)
+    end
+
+    it "creates unique bnodes within different formula (quickvar)" do
+      n3 = %(
+      :a a :Thing .
+      {?a a :Thing} => {?a a :Thing} .
+      )
+      result = parse(n3, repo: @repo, base_uri: "http://a/b")
+      statements = result.statements
+      expect(statements.length).to produce(4, logger)
+      # only one variable
+      variables = statements.map(&:to_a).flatten.select {|r| r.variable?}
+      expect(variables.uniq.count).to produce(1, logger)
+    end
+
+    {
+      "empty subject" => {
+        input: %({} .),
+        expect: %({} .)
+      },
+      "empty object" => {
+        input: %( {} .),
+        expect: %( {} .)
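# Aside: a minimal sketch of the formula behaviour these examples exercise, assuming
# only the reader/repository API already used in this spec (the N3 snippet and base
# URI below are illustrative):
#
#   repo = RDF::N3::Repository.new
#   RDF::N3::Reader.new(%(:a :b {:c :d :e} .), base_uri: "http://a/b").each_statement do |st|
#     repo << st
#   end
#   formula = repo.statements.find {|st| st.predicate == RDF::URI("http://a/b#b")}.object
#   formula.node?                            # the formula itself is a blank-node term
#   repo.query({graph_name: formula}).count  # its contents are stored as quads named by that node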
+ }, + "as subject with constant content" => { + input: %({ } .), + expect: %({ } .), + }, + "as object with constant content" => { + input: %( { } .), + expect: %( { } .), + }, + }.each do |name, params| + it name do + result = parse(params[:expect], base_uri: "http://a/b", logger: false) + expect(parse(params[:input], base_uri: "http://a/b")).to be_equivalent_graph(result, logger: logger, format: :n3) + end end context "contexts" do @@ -953,26 +1123,27 @@ # ENDS ) - @repo = RDF::Repository.new + @repo = RDF::N3:: Repository.new parse(n3, repo: @repo, base_uri: "http://a/b") end + subject {@repo} it "assumption graph has 2 statements" do - tt = @repo.first(subject: RDF::URI.new("http://a/b#assumption"), predicate: RDF::OWL.sameAs) + tt = subject.first(subject: RDF::URI.new("http://a/b#assumption"), predicate: RDF::OWL.sameAs) expect(tt.object).to be_node - expect(@repo.query({graph_name: tt.object}).to_a.length).to eq 2 + expect(subject.query({graph_name: tt.object}).to_a.length).to eq 2 end it "conclusion graph has 1 statements" do - tt = @repo.first(subject: RDF::URI.new("http://a/b#conclusion"), predicate: RDF::OWL.sameAs) + tt = subject.first(subject: RDF::URI.new("http://a/b#conclusion"), predicate: RDF::OWL.sameAs) expect(tt.object).to be_node - expect(@repo.query({graph_name: tt.object}).to_a.length).to eq 1 + expect(subject.query({graph_name: tt.object}).to_a.length).to eq 1 end it "trivialTruth equivalent to empty graph" do - tt = @repo.first(subject: RDF::URI.new("http://a/b#trivialTruth"), predicate: RDF::OWL.sameAs) + tt = subject.first(subject: RDF::URI.new("http://a/b#trivialTruth"), predicate: RDF::OWL.sameAs) expect(tt.object).to be_node - @repo.query({graph_name: tt.object}) do |s| + subject.query({graph_name: tt.object}) do |s| puts "statement: #{s}" end end @@ -982,128 +1153,13 @@ describe "object lists" do it "should create 2 statements for simple list" do - n3 = %(:a :b :c, :d) + n3 = %(:a :b :c, :d .) nt = %( . .) expected = RDF::Graph.new {|g| g << RDF::N3::Reader.new(nt, base_uri: "http://a/b")} expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(expected, about: "http://a/b", logger: logger, format: :n3) end end - describe "property lists" do - it "should parse property list" do - n3 = %( - @prefix a: . - - a:b a:p1 "123" ; a:p1 "456" . - a:b a:p2 a:v1 ; a:p3 a:v2 . - ) - nt = %( - "123" . - "456" . - . - . - ) - expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) - end - end - - describe "lists" do - it "should parse empty list" do - n3 = %(@prefix :. :empty :set ().) - nt = %( - .) - expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) - end - - it "should parse list with single element" do - n3 = %(@prefix :. :gregg :wrote ("RdfContext").) - nt = %( - _:bnode0 "RdfContext" . - _:bnode0 . - _:bnode0 . - ) - expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) - end - - it "should parse list with multiple elements" do - n3 = %(@prefix :. :gregg :name ("Gregg" "Barnum" "Kellogg").) - nt = %( - _:bnode0 "Gregg" . - _:bnode0 _:bnode1 . - _:bnode1 "Barnum" . - _:bnode1 _:bnode2 . - _:bnode2 "Kellogg" . - _:bnode2 . - _:bnode0 . - ) - expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) - end - - it "should parse unattached lists" do - n3 = %( - @prefix a: . - - ("1" "2" "3") . 
- # This is not a statement. - () . - ) - nt = %( - _:bnode0 "1" . - _:bnode0 _:bnode1 . - _:bnode1 "2" . - _:bnode1 _:bnode2 . - _:bnode2 "3" . - _:bnode2 . - ) - expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) - end - - it "should add property to nil list" do - n3 = %(@prefix a: . () a:prop "nilProp" .) - nt = %( "nilProp" .) - expect(parse(n3, base_uri: "http://a/b")).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) - end - - it "should parse with compound items" do - n3 = %( - @prefix a: . - a:a a:p ( - [ a:p2 "v1" ] - - - ("inner list") - ) . - a:p "value" . - ) - nt = %( - _:bnode3 . - "value" . - _:bnode3 _:bnode5 . - _:bnode3 _:bnode2 . - _:bnode5 "v1" . - _:bnode2 . - _:bnode2 _:bnode1 . - _:bnode1 . - _:bnode1 _:bnode0 . - _:bnode0 _:bnode4 . - _:bnode0 . - _:bnode4 "inner list" . - _:bnode4 . - ) - g = parse(n3, base_uri: "http://a/b") - expect(g.subjects.to_a.length).to eq 8 - n = g.first_object(subject: RDF::URI.new("http://foo/a#a"), predicate: RDF::URI.new("http://foo/a#p")) - expect(n).to be_node - seq = RDF::List.new(subject: n, graph: g) - expect(seq.to_a.length).to eq 4 - expect(seq.first).to be_node - expect(seq.second).to eq RDF::URI.new("http://resource1") - expect(seq.third).to eq RDF::URI.new("http://resource2") - expect(seq.fourth).to be_node - end - - end - # n3p tests taken from http://inamidst.com/n3p/test/ describe "with real data tests" do dirs = %w(misc lcsh rdflib n3p) @@ -1185,43 +1241,45 @@ end describe "canonicalization" do -# { -# "" => "", -# "" => "", -# "" => "", -# -# "" => "", -# "" => "", -# -# "" => "", -# "" => "", -# "" => "", -# "" => "", -# -# "" => "", -# "" => "", -# "" => "", -# "" => "", -# -# "" => "", -# "" => "", -# "" => "", -# "" => "", -# -# "" => "", -# "" => "", -# "" => "", -# "" => "", -# -# "" => "", -# "" => "", -# }.each_pair do |input, result| -# it "returns subject #{result} given #{input}" do -# n3 = %(#{input} a :b) -# nt = %(#{result} .) -# parse(n3, base_uri: "http://a/b", canonicalize: true).should be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) -# end -# end + { + "" => "", + "" => "", + "" => "", + + "" => "", + "" => "", + + "" => "", + "" => "", + "" => "", + + "" => "", + "" => "", + "" => "", + "" => "", + + "" => "", + "" => "", + "" => "", + "" => "", + + "" => "", + + %("+1"^^xsd:integer) => %("1"^^), + %(+1) => %("1"^^), + %(.1) => %("0.1"^^), + %(123.E+1) => %("1.23E3"^^), + %(true) => %("true"^^), + %("lang"@EN) => %("lang"@en), + %("""lang"""@EN) => %("lang"@en), + %("""+1"""^^xsd:integer) => %("1"^^), + }.each_pair do |input, result| + it "returns #{result} given #{input}" do + n3 = %(@prefix xsd: . #{input} .) + nt = %( #{result} .) + expect(parse(n3, base_uri: "http://a/b", canonicalize: true)).to be_equivalent_graph(nt, about: "http://a/b", logger: logger, format: :n3) + end + end { %("+1"^^xsd:integer) => %("1"^^), @@ -1239,20 +1297,30 @@ end describe "validation" do - { - %(:y :p1 "xyz"^^xsd:integer .) => %r("xyz" is not a valid .*), - %(:y :p1 "12xyz"^^xsd:integer .) => %r("12xyz" is not a valid .*), - %(:y :p1 "xy.z"^^xsd:double .) => %r("xy\.z" is not a valid .*), - %(:y :p1 "+1.0z"^^xsd:double .) => %r("\+1.0z" is not a valid .*), - %(:a :b .) =>RDF::ReaderError, - %(:a :b 'single quote' .) => RDF::ReaderError, - %(:a "literal value" :b .) => RDF::ReaderError, - %(@keywords prefix. :e prefix :f .) 
=> RDF::ReaderError - }.each_pair do |n3, error| - it "should raise '#{error}' for '#{n3}'" do + [ + %(:y :p1 "xyz"^^xsd:integer .), + %(:y :p1 "12xyz"^^xsd:integer .), + %(:y :p1 "xy.z"^^xsd:double .), + %(:y :p1 "+1.0z"^^xsd:double .), + %(:a :b .), + #%(:a "literal value" :b .), + %(@keywords prefix. :e prefix :f .), + ].each do |n3| + it "should raise ReaderError for '#{n3}'" do + expect { + parse("@prefix xsd: . #{n3}", base_uri: "http://a/b", validate: true) + }.to raise_error(RDF::ReaderError) + end + end + + [ + %(:y _:p1 "z" .), + %("y" :p1 "z" .), + ].each do |n3| + it "'#{n3}' is valid" do expect { parse("@prefix xsd: . #{n3}", base_uri: "http://a/b", validate: true) - }.to raise_error(error) + }.not_to raise_error end end end @@ -1283,7 +1351,7 @@ def parse(input, **options) validate: false, canonicalize: false, }.merge(options) - repo = options[:repo] || RDF::Repository.new + repo = options[:repo] || [].extend(RDF::Enumerable, RDF::Queryable) RDF::N3::Reader.new(input, **options).each_statement do |statement| repo << statement end diff --git a/spec/reasoner_spec.rb b/spec/reasoner_spec.rb index e9626e6..fb1a33f 100644 --- a/spec/reasoner_spec.rb +++ b/spec/reasoner_spec.rb @@ -4,13 +4,112 @@ describe "RDF::N3::Reasoner" do let(:logger) {RDF::Spec.logger} + before {logger.level = Logger::INFO} + + context "variables" do + context "universals" do + # Universal variables remain in-scope between formulae + end + + context "existentials" do + # Universal variables go out of scope between formulae + end + end context "n3:log" do + context "log:conclusion" do + { + "conclusion-super-simple" => { + input: %( + { + { + { } => { a } . + . + } log:conclusion ?y + } => { ?y a :TestResult }. + ), + expect: %( + { + { + { } => { a } . + . + } log:conclusion ?y + } => { ?y a :TestResult }. + + { + . + a . + { .} => { a .} . + } a :TestResult . + ) + }, + "conclusion-simple" => { + input: %( + { + { } => { a } . + . + } a :TestRule. + + { ?x a :TestRule; log:conclusion ?y } => { ?y a :TestResult }. + ), + expect: %( + { + { } => { a } . + . + } a :TestRule. + + { ?x a :TestRule; log:conclusion ?y } => { ?y a :TestResult }. + + { + . + a . + { .} => { a .} . + } a :TestResult . + ), + #pending: "broken after not matching non-existant patterns" + }, + }.each do |name, options| + it name do + logger.info "input: #{options[:input]}" + options = {data: false, conclusions: false}.merge(options) + pending(options[:pending]) if options[:pending] + expected = parse(options[:expect]) + result = reason(options[:input], **options) + expect(result).to be_equivalent_graph(expected, logger: logger, format: :n3) + end + end + end + + context "log:conjunction" do + { + "conjunction" => { + input: %( + { + ({:sky :color :blue} {:sky :color :green}) + log:conjunction ?F + } => { ?F a :result} . + ), + expect: %( + {:sky :color :blue, :green } a :result . + ) + }, + }.each do |name, options| + it name do + logger.info "input: #{options[:input]}" + pending(options[:pending]) if options[:pending] + options = {data: false, conclusions: true}.merge(options) + expected = parse(options[:expect]) + result = reason(options[:input], **options) + expect(result).to be_equivalent_graph(expected, logger: logger, format: :n3) + end + end + end + context "log:implies" do { "r1" => { input: %( - @forAll :a, :b, :c, :x, :y, :z. + @forAll :a, :b. ( "one" "two" ) a :whatever. { (:a :b) a :whatever } log:implies { :a a :SUCCESS. :b a :SUCCESS }. ), @@ -44,10 +143,9 @@ }, "double" => { input: %( - @keywords is, of, a. 
- dan a Man; home []. - { ?WHO home ?WHERE. ?WHERE in ?REGION } => { ?WHO homeRegion ?REGION }. - { dan home ?WHERE} => {?WHERE in Texas} . + :dan a :Man; :home []. + { ?WHO :home ?WHERE. ?WHERE :in ?REGION } => { ?WHO :homeRegion ?REGION }. + { :dan :home ?WHERE} => {?WHERE :in :Texas} . ), expect: %( :dan a :Man; @@ -62,7 +160,7 @@ ), expect: %( :a :b "A noun", 3.14159265359 . - [ a :Thing] + [ a :Thing] . ) }, "uses variables bound in parent" => { @@ -74,34 +172,872 @@ expect: %( :a :b :c; :d :e. ) + }, + "does not imply facts not in evidence" => { + input: %( + {:s :p :o} => {:test a :Failure}. + ), + expect: %() + } + }.each do |name, options| + it name do + logger.info "input: #{options[:input]}" + pending(options[:pending]) if options[:pending] + expected = parse(options[:expect]) + result = reason(options[:input]) + expect(result).to be_equivalent_graph(expected, logger: logger, format: :n3) + end + end + end + + context "log:includes" do + { + "t1" => { + input: %( + @prefix log: . + {{ :a :b :c } log:includes { :a :b :c }} => { :test1 a :success } . + ), + expect: %( + :test1 a :success . + ) + }, + "t2" => { + input: %( + @prefix log: . + { { <#theSky> <#is> <#blue> } log:includes {<#theSky> <#is> <#blue>} } => { :test3 a :success } . + { { <#theSky> <#is> <#blue> } log:notIncludes {<#theSky> <#is> <#blue>} } => { :test3_bis a :FAILURE } . + ), + expect: %( + :test3 a :success . + ) + }, + "quantifiers-limited-a1" => { + input: %( + @prefix log: . + {{ :foo :bar :baz } log:includes { :foo :bar :baz }} + => { :testa1 a :success } . + ), + expect: %( + :testa1 a :success . + ) + }, + "quantifiers-limited-a2" => { + input: %( + @prefix log: . + {{ :foo :bar :baz } log:includes { @forSome :foo. :foo :bar :baz }} + => { :testa2 a :success } . + ), + expect: %( + :testa2 a :success . + ) + }, + "quantifiers-limited-b2" => { + input: %( + @prefix log: . + {{ @forSome :foo. :foo :bar :baz } log:includes {@forSome :foo. :foo :bar :baz }} + => { :testb2 a :success } . + ), + expect: %( + :testb2 a :success . + ) + }, + "quantifiers-limited-a1d" => { + input: %( + @prefix log: . + {{ :fee :bar :baz } log:includes { :foo :bar :baz }} + => { :testa1d a :FAILURE } . + ), + expect: %() + }, + "t10b" => { + input: %( + @prefix log: . + { {:theSky :is :blue} log:includes { :theSky :is ?x} } log:implies { :fred :favoriteColor ?x } . + { :fred :favoriteColor :blue } log:implies { :test10b a :success}. + ), + expect: %( + :fred :favoriteColor :blue. + :test10b a :success. + ) + } + }.each do |name, options| + it name do + logger.info "input: #{options[:input]}" + pending(options[:pending]) if options[:pending] + expected = parse(options[:expect]) + result = reason(options[:input]) + expect(result).to be_equivalent_graph(expected, logger: logger, format: :n3) + end + end + end + + context "log:parsedAsN3" do + { + "i18n" => { + input: %( + {":㐭 :b :c." log:parsedAsN3 ?x} => {?x a log:Formula} . + ), + expect: %( + { } a log:Formula . + ) + }, + "log_parsedAsN3" => { + input: %( + @prefix log: . + @prefix : <#>. + + @forAll :F. + + {""" @prefix : . + @prefix crypto: . + @prefix log: . + @prefix os: . + @prefix string: . + + :foo :credential ; + :forDocument ; + :junk "32746213462187364732164732164321" . + """ log:parsedAsN3 :F} log:implies { :F a :result }. + ), + expect: %( + @prefix rdf: . + @prefix xsd: . + + { + ; + ; + "32746213462187364732164732164321" . + } a . 
+ ) + } + }.each do |name, options| + it name do + logger.info "input: #{options[:input]}" + pending(options[:pending]) if options[:pending] + options = {data: false, conclusions: true, base_uri: 'http://example.com/'}.merge(options) + expected = parse(options[:expect]) + result = reason(options[:input], **options) + expect(result).to be_equivalent_graph(expected, logger: logger, format: :n3) + end + end + end + + context "log:n3String" do + { + "i18n" => { + input: %( + {{:㐭 :b :c} log:n3String ?x} => {?x a :interesting}. + ), + regexp: [ + %r("""\s*<#㐭> <#b> <#c> \.\s*""" a <#interesting> \.)m + ] + }, + }.each do |name, options| + it name do + logger.info "input: #{options[:input]}" + pending(options[:pending]) if options[:pending] + result = reason(options[:input]) + n3str = RDF::N3::Writer.buffer {|writer| writer << result} + + logger.info "result: #{n3str}" + Array(options[:regexp]).each do |re| + logger.info "match: #{re.inspect}" + expect(n3str).to match_re(re, logger: logger, input: n3str), logger.to_s + end + end + end + end + end + + context "n3:list" do + context "list:in" do + { + "1 in (1)": { + input: %( + @prefix list: . + + { 1 list:in (1) } => { :test4a a :SUCCESS }. + ), + expect: %( + :test4a a :SUCCESS. + ) + }, + "1 in ( 1 2 3 4 5)": { + input: %( + @prefix list: . + + { 1 list:in ( 1 2 3 4 5 ) } => { :test4a a :SUCCESS }. + ), + expect: %( + :test4a a :SUCCESS. + ) + }, + "1 in ()": { + input: %( + @prefix list: . + + { 1 list:in () } => { :trap1 a :FAILURE }. + ), + expect: %( + ) + }, + "2 in ( 1 2 3 4 5)": { + input: %( + @prefix list: . + + { 2 list:in ( 1 2 3 4 5 ) } => { :test4b a :SUCCESS }. + ), + expect: %( + :test4b a :SUCCESS. + ) + }, + "thing1 :prop1": { + input: %( + :thing1 :prop1 ( :test5a :test5b :test5c ) . + { ?item list:in [ is :prop1 of :thing1 ] } => { ?item a :SUCCESS } . + ), + expect: %( + :test5a a :SUCCESS. + :test5b a :SUCCESS. + :test5c a :SUCCESS. + ) + } + }.each do |name, options| + it name do + logger.info "input: #{options[:input]}" + pending(options[:pending]) if options[:pending] + options = {conclusions: true}.merge(options) + expected = parse(options[:expect]) + expect(reason(options[:input], **options)).to be_equivalent_graph(expected, logger: logger) + end + end + end + + context "list:append" do + { + "(1 2 3 4 5) (6) const": { + input: %( + { ((1 2 3 4 5) (6)) list:append (1 2 3 4 5 6)} => {:test1 a :success}. + ), + expect: %( + :test1 a :success. + ) + }, + "(1 2 3 4 5) (6) var": { + input: %( + { ((1 2 3 4 5) (6)) list:append ?item} => {:test2 :is ?item}. + ), + expect: %( + :test2 :is (1 2 3 4 5 6). + ) + }, + "() (1) const": { + input: %( + { (() (1)) list:append (1)} => {:test3 a :success}. + ), + expect: %( + :test3 a :success. + ) + }, + "() (1) var": { + input: %( + { (() (1)) list:append ?item} => {:test4 :is ?item}. + ), + expect: %( + :test4 :is (1). + ) + }, + "thing1 :prop1": { + input: %( + :thing1 :prop1 ( 1 2 3 ) . + :thing2 :prop1 ( 4 ) . + { + ([is :prop1 of :thing1] + [is :prop1 of :thing2]) list:append ?item + } => { + :test5 :is ?item + } . + ), + expect: %( + :test5 :is (1 2 3 4). 
+ ) } }.each do |name, options| it name do - result = parse(options[:expect]) - expect(reason(options[:input])).to be_equivalent_graph(result, logger: logger) + logger.info "input: #{options[:input]}" + pending(options[:pending]) if options[:pending] + options = {conclusions: true}.merge(options) + expected = parse(options[:expect]) + expect(reason(options[:input], **options)).to be_equivalent_graph(expected, logger: logger) + end + end + end + + context "list:member" do + { + "1 in (1 2 3 4 5)": { + input: %( + { ( 1 2 3 4 5 ) list:member 1 } => { :test4a a :SUCCESS }. + ), + expect: %( + :test4a a :SUCCESS . + ) + }, + "?x in (1 2 3 4 5)": { + input: %( + { ( 1 2 3 4 5 ) list:member ?x } => { :test4a :is ?x }. + ), + expect: %( + :test4a :is 1 . + :test4a :is 2 . + :test4a :is 3 . + :test4a :is 4 . + :test4a :is 5 . + ) + }, + "Pythag 3 5": { + input: %( + { ((3) (5))!list:member list:member ?z } => { ?z a :Pythagorean }. + ), + expect: %( + 3 a :Pythagorean. + 5 a :Pythagorean. + ) + }, + }.each do |name, options| + it name do + logger.info "input: #{options[:input]}" + pending(options[:pending]) if options[:pending] + options = {conclusions: true}.merge(options) + expected = parse(options[:expect]) + expect(reason(options[:input], **options)).to be_equivalent_graph(expected, logger: logger) + end + end + end + end + + context "n3:math" do + context "math:absoluteValue" do + { + '"1"': { + input: %( + { "1" math:absoluteValue 1 } => {:test1a a :SUCCESS}. + ), + expect: %( + :test1a a :SUCCESS . + ) + }, + }.each do |name, options| + it name do + logger.info "input: #{options[:input]}" + pending(options[:pending]) if options[:pending] + options = {conclusions: true}.merge(options) + if options[:exception] + expect {reason(options[:input], **options)}.to raise_error options[:exception] + else + expected = parse(options[:expect]) + expect(reason(options[:input], **options)).to be_equivalent_graph(expected, logger: logger) + end + end + end + end + + context "math:ceiling" do + { + '"2.6"': { + input: %( + { "2.6" math:ceiling ?x} => { ?x :valueOf "ceiling(2.7)" } . + ), + expect: %( + 3 :valueOf "ceiling(2.7)" . + ) + }, + "-8.1": { + input: %( + { -8.1 math:ceiling ?x } => {:test2a :is ?x}. + ), + expect: %( + :test2a :is -8 . + ) + } + }.each do |name, options| + it name do + logger.info "input: #{options[:input]}" + pending(options[:pending]) if options[:pending] + options = {conclusions: true}.merge(options) + expected = parse(options[:expect]) + expect(reason(options[:input], **options)).to be_equivalent_graph(expected, logger: logger) + end + end + end + + context "math:difference" do + { + '("8" "3")': { + input: %( + { ("8" "3") math:difference ?x} => { ?x :valueOf "8 - 3" } . + ), + expect: %( + 5 :valueOf "8 - 3" . + ) + }, + '("8")': { + input: %( + { ("8") math:difference ?x } => { ?x :valueOf "8 - (error?)" } . + ), + expect: %() + }, + '(8 3)': { + input: %( + { (8 3) math:difference ?x} => { ?x :valueOf "8 - 3" } . + ), + expect: %( + 5 :valueOf "8 - 3" . + ) + } + }.each do |name, options| + it name do + logger.info "input: #{options[:input]}" + pending(options[:pending]) if options[:pending] + options = {conclusions: true}.merge(options) + expected = parse(options[:expect]) + expect(reason(options[:input], **options)).to be_equivalent_graph(expected, logger: logger) + end + end + end + + context "math:floor" do + { + '"2.6"': { + input: %( + { "2.6" math:floor ?x} => { ?x :valueOf "floor(2.7)" } . + ), + expect: %( + 2 :valueOf "floor(2.7)" . 
+ ) + }, + '-8.1': { + input: %( + { -8.1 math:floor ?x } => {:test2a :is ?x}. + ), + expect: %( + :test2a :is -9 . + ) + } + }.each do |name, options| + it name do + logger.info "input: #{options[:input]}" + pending(options[:pending]) if options[:pending] + options = {conclusions: true}.merge(options) + expected = parse(options[:expect]) + expect(reason(options[:input], **options)).to be_equivalent_graph(expected, logger: logger) + end + end + end + + context "math:greaterThan" do + { + '"008" > "70"': { + input: %( + { "008" math:greaterThan "70" } => { :test10 a :FAILURE }. + ), + expect: %() + }, + '"070" > "008"': { + input: %( + { "70" math:greaterThan "008" } => { :test10 a :success }. + ), + expect: %( + :test10 a :success . + ) + } + }.each do |name, options| + it name do + logger.info "input: #{options[:input]}" + pending(options[:pending]) if options[:pending] + options = {conclusions: true}.merge(options) + expected = parse(options[:expect]) + expect(reason(options[:input], **options)).to be_equivalent_graph(expected, logger: logger) + end + end + end + + context "math:product" do + { + '("5" "3" "2")': { + input: %( + { ("5" "3" "2") math:product ?x} => { ?x :valueOf "5 * 3 * 2" } . + ), + expect: %( + 30 :valueOf "5 * 3 * 2" . + ) + }, + '(5 3 2)': { + input: %( + { (5 3 2) math:product ?x} => { ?x :valueOf "5 * 3 * 2" } . + ), + expect: %( + 30 :valueOf "5 * 3 * 2" . + ) + }, + "()": { + input: %( + { () math:product ?x } => { ?x :valuOf " () math:product ?x --- should be 1" }. + ), + expect: %( + 1 :valuOf " () math:product ?x --- should be 1" . + ) + }, + }.each do |name, options| + it name do + logger.info "input: #{options[:input]}" + pending(options[:pending]) if options[:pending] + options = {conclusions: true}.merge(options) + expected = parse(options[:expect]) + expect(reason(options[:input], **options)).to be_equivalent_graph(expected, logger: logger) + end + end + end + + context "math:sum" do + { + '("3" "5")': { + input: %( + { ("3" "5") math:sum ?x } => { ?x :valueOf "3 + 5" } . + ), + expect: %( + 8 :valueOf "3 + 5" . + ) + }, + '(3 5 100)': { + input: %( + { (3 5 100) math:sum ?x } => { ?x :valueOf "3 + 5 + 100" } . + ), + expect: %( + 108 :valueOf "3 + 5 + 100" . + ) + }, + "()": { + input: %( + { () math:sum ?x } => { ?x :valuOf " () math:sum ?x --- should be 0" }. + ), + expect: %( + 0 :valuOf " () math:sum ?x --- should be 0" . + ) + }, + }.each do |name, options| + it name do + logger.info "input: #{options[:input]}" + pending(options[:pending]) if options[:pending] + options = {conclusions: true}.merge(options) + expected = parse(options[:expect]) + expect(reason(options[:input], **options)).to be_equivalent_graph(expected, logger: logger) + end + end + + context "trig" do + { + "0": { + asin: "0.0e0", + sin: "0.0e0", + sinh: "0.0e0", + cos: "1.0e0", + cosh: "1.0e0", + atan: "0.0e0", + tan: "0.0e0", + tanh: "0.0e0", + }, + "3.14159265358979323846": { + cos: "-1.0e0" + }, + # pi/4 + "0.7853981633974483": { + tan: ["1.0e0", "0.9e0"], + }, + # pi/3 + "1.0471975511965976": { + cos: ["0.51e0", "0.49e0"], + }, + }.each do |subject, params| + params.each do |fun, object| + it "#{subject} math:#{fun} #{object}" do + if object.is_a?(Array) + input = %({ #{subject} math:#{fun} _:x . _:x math:lessThan #{object.first}; math:greaterThan #{object.last} } => { :#{fun} a :SUCCESS} .) + expect = %(:#{fun} a :SUCCESS .) + else + input = %({ #{subject} math:#{fun} ?x } => { #{subject} :#{fun} ?x} .) + expect = %(#{subject} :#{fun} #{object} .) 
+ end + logger.info "input: #{input}" + expected = parse(expect) + expect(reason(input, conclusions: true)).to be_equivalent_graph(expected, logger: logger) + end + end + end + end + end + + context "math-test" do + { + "A nested rule": { + input: %( + { ?x is math:sum of (3 (8 3)!math:difference ) } + => { ?x :valueOf "3 + (8 - 3)" } . + ), + expect: %( + 8 :valueOf "3 + (8 - 3)" . + ) + }, + "Big test": { + input: %( + { ( ("7" "2")!math:quotient + (("7" "2")!math:remainder "10000000")!math:exponentiation + ("a" "b" "c" "d" "e")!list:length + ) math:sum ?x } => + { ?x :valueOf "(7 / 2) + ((7 % 2)^10000000) + 5 [should be 9.5]" } . + ), + expect: %( + 9.5 :valueOf "(7 / 2) + ((7 % 2)^10000000) + 5 [should be 9.5]" . + ) + }, + "Combinatorial test - concatenation": { + input: %( + @prefix string: . + "3.1415926" a :testValue. + 3.1415926 a :testValue. + "1729" a :testValue. + 1729 a :testValue. + "0" a :testValue. + 0 a :testValue. + { ?x a :testValue. ?y a :testValue. + (?x ?y) math:sum ?z. + (?x " + " ?y " = " ?z ) string:concatenation ?s + } => { ?s a :RESULT }. + ), + expect: %( + "0 + 0 = 0" a :RESULT . + "0 + 1729 = 1729" a :RESULT . + "0 + 3.1415926 = 3.1415926" a :RESULT . + "1729 + 0 = 1729" a :RESULT . + "1729 + 1729 = 3458" a :RESULT . + "1729 + 3.1415926 = 1732.1415926" a :RESULT . + "3.1415926 + 0 = 3.1415926" a :RESULT . + "3.1415926 + 1729 = 1732.1415926" a :RESULT . + "3.1415926 + 3.1415926 = 6.2831852" a :RESULT . + ) + }, + "Combinatorial test - worksWith": { + input: %( + "3.1415926" a :testValue. + 3.1415926 a :testValue. + "1729" a :testValue. + 1729 a :testValue. + "0" a :testValue. + 0 a :testValue. + { ?x a :testValue. ?y a :testValue. + ?z is math:sum of (?x (?y ?x)!math:difference). + ?z math:equalTo ?y } => {?x :worksWith ?y}. + ), + expect: %( + 0 a :testValue; + :worksWith "1729", + 1729, + 0, + "3.1415926", + "0", + 3.1415926 . + + "0" a :testValue; + :worksWith "1729", + 1729, + 0, + "3.1415926", + "0", + 3.1415926 . + + "3.1415926" a :testValue; + :worksWith "1729", + 1729, + 0, + "3.1415926", + "0", + 3.1415926 . + + 3.1415926 a :testValue; + :worksWith "1729", + 1729, + 0, + "3.1415926", + "0", + 3.1415926 . + + 1729 a :testValue; + :worksWith "1729", + 1729, + 0, + "3.1415926", + "0", + 3.1415926 . + + "1729" a :testValue; + :worksWith "1729", + 1729, + 0, + "3.1415926", + "0", + 3.1415926 . + ), + conclusions: false, data: true + }, + }.each do |name, options| + it name do + pending(options[:pending]) if options[:pending] + options = {conclusions: true}.merge(options) + logger.info "input: #{options[:input]}" + expected = parse(options[:expect]) + expect(reason(options[:input], **options)).to be_equivalent_graph(expected, logger: logger) end end end end context "n3:string" do - context "string:startsWith" do + context "string:concatenation" do { - "literal starts with literal" => { + "string": { input: %( @prefix string: . - :a :b :c . - {"abc" string:startsWith "a"} => {:test a :Success}. + {("foo" "bar") string:concatenation ?x} => {:test :is ?x}. + ), + expect: %(:test :is "foobar" .) + }, + "integer": { + input: %( + @prefix string: . + {(1 01) string:concatenation ?x} => {:test :is ?x}. + ), + expect: %(:test :is "11" .) + }, + "decimal": { + input: %( + @prefix string: . + {(0.0 1.0 2.5 -2.5) string:concatenation ?x} => {:test :is ?x}. + ), + expect: %(:test :is "012.5-2.5" .) + }, + "boolean": { + input: %( + @prefix string: . + @prefix xsd: . + {( + true + false + "0"^^xsd:boolean + ) string:concatenation ?x} => {:test :is ?x}. 
+ ), + expect: %(:test :is "truefalsefalse" .) + }, + "float": { + input: %( + @prefix string: . + @prefix xsd: . + {( + "0E1"^^xsd:float + "1E0"^^xsd:float + "1.25"^^xsd:float + "-7.875"^^xsd:float + ) string:concatenation ?x} => {:test :is ?x}. + ), + expect: %(:test :is "011.25-7.875" .) + }, + "double": { + input: %( + @prefix string: . + {(0E1 1E0 1.23E3) string:concatenation ?x} => {:test :is ?x}. + ), + expect: %(:test :is "011230" .) + }, + "IRI": { + input: %( + @prefix string: . + {(:a " " :b) string:concatenation ?x} => {:test :is ?x}. + ), + expect: %(:test :is "http://example.org/a http://example.org/b" .), + base_uri: "http://example.org/" + }, + "test13g": { + input: %( + @prefix string: . + { "" log:equalTo [ is string:concatenation of () ] } => {:test13g a :success}. ), expect: %( - :a :b :c . - :test a :Success. + :test13g a :success . ) } }.each do |name, options| it name do - result = parse(options[:expect]) - expect(reason(options[:input])).to be_equivalent_graph(result, logger: logger) + logger.info "input: #{options[:input]}" + pending(options[:pending]) if options[:pending] + options = {conclusions: true}.merge(options) + expected = parse(options[:expect], base_uri: options[:base_uri]) + expect(reason(options[:input], **options)).to be_equivalent_graph(expected, logger: logger) + end + end + end + + context "string:startsWith" do + { + "literal starts with literal" => { + input: %( + @prefix string: . + {"abc" string:startsWith "a"} => {:test a :Success}. + ), + expect: %(:test a :Success.) + }, + "ext starts with literal" => { + input: %( + @prefix string: . + :abc :value "abc" . + {[ is :value of :abc] string:startsWith "a"} => {:test a :Success}. + ), + expect: %(:test a :Success.) + }, + "literal starts with ext" => { + input: %( + @prefix string: . + :a :value "a" . + {"abc" string:startsWith [is :value of :a]} => {:test a :Success}. + ), + expect: %(:test a :Success.) + }, + }.each do |name, options| + it name do + logger.info "input: #{options[:input]}" + pending(options[:pending]) if options[:pending] + options = {conclusions: true}.merge(options) + expected = parse(options[:expect]) + expect(reason(options[:input], **options)).to be_equivalent_graph(expected, logger: logger) + end + end + end + end + + context "n3:time" do + context "time:day" do + { + "2002-06-22T22:09:32-05:00" => { + input: %( + { "2002-06-22T22:09:32-05:00" time:day ?x } => { :test1 :is "22" }. + ), + expect: %(:test1 :is "22".) 
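# Aside: a minimal sketch of driving the reasoner the way the `reason` helper below
# does, assuming the RDF::N3::Reasoner API exercised by these tests (the rule text,
# base URI and prefix IRI are illustrative):
#
#   rules = %(
#     @prefix string: <http://www.w3.org/2000/10/swap/string#> .
#     { "abc" string:startsWith "a" } => { :test a :Success } .
#   )
#   reader   = RDF::N3::Reader.new(rules, base_uri: "http://example.com/", list_terms: true)
#   reasoner = RDF::N3::Reasoner.new(reader, base_uri: "http://example.com/")
#   reasoner.execute(think: true)
#   conclusions = RDF::N3::Repository.new {|r| r << reasoner.conclusions}
#   # conclusions should now hold something like <http://example.com/#test> a :Success .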
+        },
+      }.each do |name, options|
+        it name do
+          logger.info "input: #{options[:input]}"
+          pending(options[:pending]) if options[:pending]
+          options = {conclusions: true}.merge(options)
+          expected = parse(options[:expect])
+          expect(reason(options[:input], **options)).to be_equivalent_graph(expected, logger: logger)
+        end
+      end
+    end
+  end
@@ -109,7 +1045,7 @@
  # Parse N3 input into a repository
  def parse(input, **options)
-    repo = options[:repo] || RDF::Repository.new
+    repo = options[:repo] || RDF::N3::Repository.new
    RDF::N3::Reader.new(input, **options).each_statement do |statement|
      repo << statement
    end
@@ -117,19 +1053,25 @@ def parse(input, **options)
  end

  # Reason over input, returning a repo
-  def reason(input, base_uri: 'http://example.com/', filter: false, data: true, think: true, **options)
-    input = parse(input, **options) if input.is_a?(String)
-    reasoner = RDF::N3::Reasoner.new(input, base_uri: base_uri)
-    repo = RDF::Repository.new
+  def reason(input, base_uri: nil, conclusions: false, data: true, think: true, **options)
+    input = parse(input, list_terms: true, base_uri: base_uri, **options) if input.is_a?(String)
+    reasoner = RDF::N3::Reasoner.new(input, logger: logger, base_uri: base_uri)
+    repo = RDF::N3::Repository.new

-    reasoner.execute(logger: logger, think: think)
-    if filter
+    reasoner.execute(think: think)
+    if conclusions
      repo << reasoner.conclusions
    elsif data
      repo << reasoner.data
    else
      repo << reasoner
    end
-    repo
+
+    # Expand results with embedded lists to ease comparison
+    RDF::Repository.new do |r|
+      repo.each_expanded_statement do |st|
+        r << st
+      end
+    end
  end
end
diff --git a/spec/repository_spec.rb b/spec/repository_spec.rb
new file mode 100644
index 0000000..d061b1c
--- /dev/null
+++ b/spec/repository_spec.rb
@@ -0,0 +1,119 @@
+require_relative 'spec_helper'
+require 'rdf/spec/repository'
+
+describe RDF::N3::Repository do
+  # @see lib/rdf/spec/repository.rb in rdf-spec
+  it_behaves_like 'an RDF::Repository' do
+    let(:repository) { RDF::N3::Repository.new }
+  end
+
+  it { is_expected.not_to be_durable }
+
+  let(:list_subject) {RDF::Statement.new(RDF::N3::List[RDF::Literal('a'), RDF::Literal('b')], RDF::URI('p'), RDF::Literal('o'))}
+  let(:list_object) {RDF::Statement.new(RDF::URI('s'), RDF::URI('p'), RDF::N3::List[RDF::Literal('a'), RDF::Literal('b')])}
+
+  it "maintains arbitrary options" do
+    repository = RDF::N3::Repository.new(foo: :bar)
+    expect(repository.options).to have_key(:foo)
+    expect(repository.options[:foo]).to eq :bar
+  end
+
+  describe '#query_pattern' do
+    before { subject.insert(*(RDF::Spec.quads + [list_subject, list_object])) }
+
+    it "finds a list subject constant" do
+      pattern = RDF::Query::Pattern.new(list_subject.subject, nil, nil)
+      solutions = []
+      subject.send(:query_pattern, pattern) {|s| solutions << s}
+
+      expect(solutions.size).to eq 1
+    end
+  end
+
+  describe '#insert_to' do
+    it "inserts a statement with a list subject" do
+      subject << list_subject
+      expect(subject.count).to eql 1
+      expect(subject.statements.first).to eql list_subject
+    end
+
+    it "inserts a statement with a list object" do
+      subject << list_object
+      expect(subject.count).to eql 1
+      expect(subject.statements.first).to eql list_object
+    end
+  end
+
+  describe '#has_statement' do
+    it "detects a statement with a list subject" do
+      subject << list_subject
+      expect(subject).to have_statement(list_subject)
+    end
+
+    it "detects a statement with a list object" do
+      subject << list_object
+      expect(subject).to have_statement(list_object)
+    end
+  end
+
+  describe '#delete_from'
do + it "deletes a statement with a list subject" do + subject << list_subject + subject.delete(list_subject) + expect(subject.count).to eql 0 + end + + it "deletes a statement with a list object" do + subject << list_object + subject.delete(list_object) + expect(subject.count).to eql 0 + end + end + + describe '#each_expanded_statement' do + context "with standard quads" do + before {subject << RDF::Spec.quads} + it {is_expected.to respond_to(:each_expanded_statement)} + its(:each_expanded_statement) {is_expected.to be_an_enumerator} + its(:each_expanded_statement) {expect(subject.each_expanded_statement.to_a).to all(be_statement)} + end + + { + "straight triple": { + input: RDF::N3::Repository.new {|g| g << RDF::Statement(RDF::URI('s'), RDF::URI('p'), RDF::URI('o'))}, + result: RDF::Repository.new {|r| r << RDF::Statement(RDF::URI('s'), RDF::URI('p'), RDF::URI('o'))} + }, + "list subject": { + input: RDF::N3::Repository.new {|r| r << RDF::Statement(RDF::N3::List['a'], RDF::URI('p'), RDF::URI('o'))}, + result: RDF::Repository.new { |r| + r << RDF::Statement(RDF::Node.intern(:l1), RDF::URI('p'), RDF::URI('o')) + r << RDF::Statement(RDF::Node.intern(:l1), RDF.first, 'a') + r << RDF::Statement(RDF::Node.intern(:l1), RDF.rest, RDF.nil) + } + }, + "list object": { + input: RDF::N3::Repository.new {|r| r << RDF::Statement(RDF::URI('s'), RDF::URI('p'), RDF::N3::List['a'])}, + result: RDF::Repository.new { |r| + r << RDF::Statement(RDF::URI('s'), RDF::URI('p'), RDF::Node.intern(:l1)) + r << RDF::Statement(RDF::Node.intern(:l1), RDF.first, 'a') + r << RDF::Statement(RDF::Node.intern(:l1), RDF.rest, RDF.nil) + } + }, + "embedded list": { + input: RDF::N3::Repository.new {|r| r << RDF::Statement(RDF::URI('s'), RDF::URI('p'), RDF::N3::List[RDF::N3::List['a']])}, + result: RDF::Repository.new { |r| + r << RDF::Statement(RDF::URI('s'), RDF::URI('p'), RDF::Node.intern(:l1)) + r << RDF::Statement(RDF::Node.intern(:l1), RDF.first, RDF::Node.intern(:l2)) + r << RDF::Statement(RDF::Node.intern(:l1), RDF.rest, RDF.nil) + r << RDF::Statement(RDF::Node.intern(:l2), RDF.first, 'a') + r << RDF::Statement(RDF::Node.intern(:l2), RDF.rest, RDF.nil) + } + }, + }.each do |name, params| + it name do + expanded = RDF::Repository.new {|r| r << params[:input].each_expanded_statement} + expect(expanded).to be_isomorphic_with(params[:result]) + end + end + end +end diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index 1df5aa0..29f9173 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -9,20 +9,25 @@ require 'rdf/ntriples' require 'rdf/spec' require 'rdf/spec/matchers' -require 'yaml' # XXX should be in open-uri/cached + +begin + require 'simplecov' + require 'coveralls' + SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter.new([ + SimpleCov::Formatter::HTMLFormatter, + Coveralls::SimpleCov::Formatter + ]) + SimpleCov.start do + add_filter "/spec/" + end +rescue LoadError => e + STDERR.puts "Coverage Skipped: #{e.message}" +end ::RSpec.configure do |c| c.filter_run focus: true c.run_all_when_everything_filtered = true - c.exclusion_filter = { - ruby: lambda { |version| !(RUBY_VERSION.to_s =~ /^#{version.to_s}/) }, - } -end - -module RDF - module Isomorphic - alias_method :==, :isomorphic_with? 
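# Aside: a minimal sketch of the #each_expanded_statement behaviour covered by
# repository_spec.rb above, assuming the RDF::N3::Repository API shown there (the
# terms are illustrative):
#
#   repo = RDF::N3::Repository.new
#   repo << RDF::Statement(RDF::URI('s'), RDF::URI('p'), RDF::N3::List[RDF::Literal('a')])
#   repo.count                               #=> 1 (the list stays a single native term)
#   repo.each_expanded_statement.to_a.size   #=> 3 (the rdf:first/rdf:rest triples are expanded)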
- end + c.filter_run_excluding slow: true unless ENV['SLOW'] end # Heuristically detect the input stream diff --git a/spec/suite_extended_spec.rb b/spec/suite_extended_spec.rb new file mode 100644 index 0000000..a0f4dee --- /dev/null +++ b/spec/suite_extended_spec.rb @@ -0,0 +1,81 @@ +require_relative 'spec_helper' +require 'rdf/trig' # For formatting error descriptions + +describe RDF::N3::Reader do + # W3C N3 Test suite from http://www.w3.org/2000/10/swap/test/n3parser.tests + describe "w3c n3 tests" do + let(:logger) {RDF::Spec.logger} + + after(:each) do |example| + puts logger.to_s if + example.exception && + !example.exception.is_a?(RSpec::Expectations::ExpectationNotMetError) + end + + require_relative 'suite_helper' + Fixtures::SuiteTest::Manifest.open("https://w3c.github.io/N3/tests/N3Tests/manifest-extended.ttl") do |m| + describe m.label do + m.entries.each do |t| + next if t.approval == 'rdft:Rejected' + specify "#{t.name}: #{t.comment}", slow: t.slow? do + case t.name + when *%w(07test_utf8.n3) + pending("invalid byte sequence in UTF-8") + when *%w(01etc_skos-extra-rules.n3 01etc_skos-rules.n3 07test_pd_hes_theory.n3) + pending("@keywords") + when *%w(01etc_train_model.n3 04test_icalQ002.n3 04test_icalR.n3 04test_LanguageQ.n3 + 04test_LanguageQ.n3 04test_query-survey-11.n3 04test_query-survey-13.n3 + 04test_icalQ001.n3) + pending("variable filter syntax") + when *%w(04test_ontology-for-data-model.n3) + pending("invalid literal") + end + + + t.logger = logger + t.logger.info t.inspect + t.logger.info "source:\n#{t.input}" + + reader = RDF::N3::Reader.new(t.input, + base_uri: t.base, + canonicalize: false, + validate: true, + logger: t.logger) + + repo = [].extend(RDF::Enumerable, RDF::Queryable) + + output_repo = if t.evaluate? + begin + format = detect_format(t.expected) + RDF::N3:: Repository.load(t.result, format: format, base_uri: t.accept) + rescue Exception => e + expect(e.message).to produce("Exception loading output #{e.inspect}", t) + end + end + + if t.positive_test? + begin + reader.each_statement {|st| repo << st} + rescue Exception => e + expect(e.message).to produce("Not exception #{e.inspect}", t) + end + + if t.evaluate? + expect(repo).to be_equivalent_graph(output_repo, t) + else + expect(repo).to be_enumerable + end + elsif t.syntax? 
+ expect { + reader.each_statement {|st| repo << st} + expect(repo.count).to produce("not this", t) + }.to raise_error(RDF::ReaderError) + else + expect(repo).not_to be_equivalent_graph(output_repo, t) + end + end + end + end + end + end +end unless ENV['CI'] \ No newline at end of file diff --git a/spec/suite_helper.rb b/spec/suite_helper.rb index a6b7ac2..ee7a2a8 100644 --- a/spec/suite_helper.rb +++ b/spec/suite_helper.rb @@ -1,11 +1,12 @@ +require 'rdf/turtle' require 'rdf/n3' require 'json/ld' # For now, override RDF::Utils::File.open_file to look for the file locally before attempting to retrieve it module RDF::Util module File - REMOTE_PATH = "https://w3c.github.io/n3/" - LOCAL_PATH = ::File.expand_path("../w3c-n3", __FILE__) + '/' + REMOTE_PATH = "https://w3c.github.io/N3/" + LOCAL_PATH = ::File.expand_path("../w3c-N3", __FILE__) + '/' class << self alias_method :original_open_file, :open_file @@ -59,7 +60,7 @@ def self.open_file(filename_or_url, **options, &block) remote_document end else - original_open_file(filename_or_url, options, &block) + original_open_file(filename_or_url, **options, &block) end end end @@ -74,17 +75,20 @@ module SuiteTest "mf": "http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#", "mq": "http://www.w3.org/2001/sw/DataAccess/tests/test-query#", "rdft": "http://www.w3.org/ns/rdftest#", - "test": "https://w3c.github.io/n3/tests/test.n3#", + "test": "https://w3c.github.io/N3/tests/test.n3#", + "action": {"@id": "mf:action", "@type": "@id"}, + "approval": {"@id": "rdft:approval", "@type": "@vocab"}, "comment": "rdfs:comment", + "conclusions": {"@id": "test:conclusions", "@type": "xsd:boolean"}, + "data": {"@id": "test:data", "@type": "xsd:boolean"}, "entries": {"@id": "mf:entries", "@container": "@list"}, + "filter": {"@id": "test:filter", "@type": "@id"}, "name": "mf:name", - "action": {"@id": "mf:action", "@type": "@id"}, - "result": {"@id": "mf:result", "@type": "@id"}, "options": {"@id": "test:options", "@type": "@id"}, - "data": {"@id": "test:data", "@type": "xsd:boolean"}, - "think": {"@id": "test:think", "@type": "xsd:boolean"}, - "filter": {"@id": "test:filter", "@type": "xsd:boolean"}, - "rules": {"@id": "test:rules", "@type": "xsd:boolean"} + "result": {"@id": "mf:result", "@type": "@id"}, + "rules": {"@id": "test:rules", "@type": "xsd:boolean"}, + "strings": {"@id": "test:strings", "@type": "xsd:boolean"}, + "think": {"@id": "test:think", "@type": "xsd:boolean"} }, "@type": "mf:Manifest", "entries": { @@ -100,7 +104,7 @@ module SuiteTest class Manifest < JSON::LD::Resource def self.open(file) #puts "open: #{file}" - g = RDF::Repository.load(file, format: :n3) + g = RDF::N3:: Repository.load(file) JSON::LD::API.fromRDF(g) do |expanded| JSON::LD::API.frame(expanded, FRAME) do |framed| yield Manifest.new(framed) @@ -115,6 +119,62 @@ def entries end class Entry < JSON::LD::Resource + # Slow tests, skipped by default + SLOW = %w( + 01etc_10tt_proof.n3 + 01etc_4color_proof.n3 + 01etc_bankSW.n3 + 01etc_biE.n3 + 01etc_bmi_proof.n3 + 01etc_data.n3 + 01etc_easter-proof.n3 + 01etc_easterE.n3 + 01etc_fcm_proof.n3 + 01etc_fgcm_proof.n3 + 01etc_fibE.n3 + 01etc_gedcom-proof.n3 + 01etc_gps-proof2.n3 + 01etc_graph-1000.n3 + 01etc_graph-10000.n3 + 01etc_mmln-gv-mln.n3 + 01etc_mmln-gv-proof.n3 + 01etc_mq_proof.n3 + 01etc_palindrome-proof.n3 + 01etc_palindrome2-proof.n3 + 01etc_pi-proof.n3 + 01etc_polynomial.n3 + 01etc_proof-1000.n3 + 01etc_proof-10000.n3 + 01etc_proof-2-1000.n3 + 01etc_proof-2-10000.n3 + 01etc_randomsample-proof.n3 + 01etc_result.n3 + 
01etc_rifE.n3 + 01etc_swet_proof.n3 + 01etc_takE.n3 + 01etc_test-dl-1000.n3 + 01etc_test-dt-1000.n3 + 01etc_test-proof-1000.n3 + 01etc_test_proof.n3 + 01etc_train_model.n3 + 04test_not-galen.n3 + 04test_radlex.n3 + 05smml_FACTSboxgeometrydetection.n3 + 05smml_FACTShousewallsmeshed.n3 + 05smml_FACTSlinkfaceedgestoobjects.n3 + 05smml_FACTSlinkfacestoobjects.n3 + 05smml_FACTStriangleedges.n3 + 07test_bd-result-1000.n3 + 07test_biR.n3 + 07test_fgcm_proof.n3 + 07test_graph-10000.n3 + 07test_gv-mln.n3 + 07test_path-1024-3.n3 + 07test_path-256-3.n3 + 07test_pd_hes_result.n3 + 07test_test-strela-1000.n3 + ) + attr_accessor :logger # For debug output formatting @@ -128,6 +188,10 @@ def name id.to_s.split('#').last end + def slow? + SLOW.include?(name) + end + # Alias data and query def input @input ||= RDF::Util::File.open_file(action) {|f| f.read} @@ -138,31 +202,27 @@ def expected end def positive_test? - !attributes['@type'].to_s.match(/Negative/) + attributes['@type'].to_s.match?(/N3Positive|N3Eval|N3Reason/) end def negative_test? - !positive_test? + attributes['@type'].to_s.match?(/N3Negative/) end def evaluate? - !!attributes['@type'].to_s.match(/Eval/) + !!attributes['@type'].to_s.match?(/N3Eval/) end def reason? - !!attributes['@type'].to_s.match(/Reason/) + !!attributes['@type'].to_s.match?(/N3Reason/) end def syntax? - !!attributes['@type'].to_s.match(/Syntax/) - end - - def reason? - !!attributes['@type'].to_s.match(/Reason/) + !!attributes['@type'].to_s.match?(/Syntax/) end def inspect - super.sub('>', "\n" + + super.sub(/>$/, "\n" + " positive?: #{positive_test?.inspect}\n" + " syntax?: #{syntax?.inspect}\n" + " eval?: #{evaluate?.inspect}\n" + diff --git a/spec/suite_parser_spec.rb b/spec/suite_parser_spec.rb index d3402f2..0d0273f 100644 --- a/spec/suite_parser_spec.rb +++ b/spec/suite_parser_spec.rb @@ -4,22 +4,29 @@ describe RDF::N3::Reader do # W3C N3 Test suite from http://www.w3.org/2000/10/swap/test/n3parser.tests describe "w3c n3 tests" do + let(:logger) {RDF::Spec.logger} + + after(:each) do |example| + puts logger.to_s if + example.exception && + !example.exception.is_a?(RSpec::Expectations::ExpectationNotMetError) + end + require_relative 'suite_helper' - Fixtures::SuiteTest::Manifest.open("https://w3c.github.io/n3/tests/manifest-parser.n3") do |m| + Fixtures::SuiteTest::Manifest.open("https://w3c.github.io/N3/tests/N3Tests/manifest.ttl") do |m| describe m.label do m.entries.each do |t| + next if t.approval == 'rdft:Rejected' specify "#{t.name}: #{t.comment}" do case t.name - when *%w(n3_10004 n3_10007 n3_10014 n3_10015 n3_10017) - pending("Reification not supported") - when *%w(n3_10013) - pending("numeric representation") - when *%w(n3_10003 n3_10006) - pending("Verified test results are incorrect") + when *%w(cwm_syntax_numbers.n3) + pending("number representation") + when *%w(cwm_syntax_too-nested.n3) + skip("stack overflow") end - t.logger = RDF::Spec.logger + t.logger = logger t.logger.info t.inspect t.logger.info "source:\n#{t.input}" @@ -29,22 +36,22 @@ validate: true, logger: t.logger) - repo = RDF::Repository.new + repo = [].extend(RDF::Enumerable, RDF::Queryable) output_repo = if t.evaluate? 
          begin
            format = detect_format(t.expected)
-            RDF::Repository.load(t.result, format: format, base_uri: t.accept)
+            RDF::N3::Repository.load(t.result, format: format, base_uri: t.accept)
          rescue Exception => e
-            expect(e.message).to produce("Exception loading output #{e.inspect}", t.logger)
+            expect(e.message).to produce("Exception loading output #{e.inspect}", t)
          end
        end

        if t.positive_test?
          begin
-            repo << reader
+            reader.each_statement {|st| repo << st}
          rescue Exception => e
-            expect(e.message).to produce("Not exception #{e.inspect}", t.logger)
+            expect(e.message).to produce("Not exception #{e.inspect}", t)
          end

          if t.evaluate?
@@ -54,8 +61,8 @@
          end
        elsif t.syntax?
          expect {
-            repo << reader
-            repo.dump(:nquads).should produce("not this", t.logger)
+            reader.each_statement {|st| repo << st}
+            expect(repo.count).to produce("not this", t)
          }.to raise_error(RDF::ReaderError)
        else
          expect(repo).not_to be_equivalent_graph(output_repo, t)
diff --git a/spec/suite_reasoner_spec.rb b/spec/suite_reasoner_spec.rb
index c4ac34e..3450d5e 100644
--- a/spec/suite_reasoner_spec.rb
+++ b/spec/suite_reasoner_spec.rb
@@ -5,67 +5,86 @@
  # W3C N3 Test suite from http://www.w3.org/2000/10/swap/test/n3parser.tests
  describe "w3c n3 tests" do
    require_relative 'suite_helper'
+    let(:logger) {RDF::Spec.logger}
+    before {logger.level = Logger::INFO}

-    Fixtures::SuiteTest::Manifest.open("https://w3c.github.io/n3/tests/manifest-reasoner.n3") do |m|
+    #after(:each) do |example|
+    #  puts logger.to_s if
+    #    example.exception &&
+    #    !example.exception.is_a?(RSpec::Expectations::ExpectationNotMetError)
+    #end
+
+    Fixtures::SuiteTest::Manifest.open("https://w3c.github.io/N3/tests/N3Tests/manifest-reasoner.ttl") do |m|
      describe m.label do
        m.entries.each do |t|
+          next if t.approval == 'rdft:Rejected'
          specify "#{t.name}: #{t.comment}" do
            case t.id.split('#').last
-              when *%w{listin bnode concat t2006}
-                pending "support for lists"
-              when *%w{t1018b2 t103 t104 t105 concat}
-                pending "support for string"
-              when *%w{t06proof}
-                pending "support for math"
-              when *%w{t01}
-                pending "support for log:supports" # log:supports not defined in vocabulary
-              when *%w{conclusion-simple}
-                pending "support for log:conclusion"
-              when *%w{conjunction}
-                pending "support for log:conjunction"
-              when *%w{t553 t554}
-                pending "support for inference over quoted graphs"
-              when *%w{t2005}
-                pending "something else"
+              when *%w{cwm_unify_unify1 cwm_unify_unify2 cwm_includes_builtins
+                  cwm_includes_t11
+                  cwm_includes_conclusion}
+                pending "log:includes etc."
+              when *%w{cwm_supports_simple cwm_string_roughly}
+                pending "Uses unsupported builtin"
+              when *%w{cwm_string_uriEncode cwm_includes_quantifiers_limited
+                  cwm_list_append}
+                skip "Blows up"
+              when *%w{cwm_list_builtin_generated_match}
+                skip("List reification")
+              when *%w{log_parsedAsN3}
+                pending "Emergent problem comparing results"
            end

-            t.logger = RDF::Spec.logger
+            t.logger = logger
            t.logger.info t.inspect
            t.logger.info "source:\n#{t.input}"

-            reader = RDF::N3::Reader.new(t.input,
-              base_uri: t.base,
+            reader = RDF::N3::Reader.open(t.action,
              canonicalize: false,
-              validate: false)
+              list_terms: true,
+              validate: false,
+              logger: false)

            reasoner = RDF::N3::Reasoner.new(reader,
-              base_uri: t.base,
              logger: t.logger)

-            repo = RDF::Repository.new
+            repo = RDF::N3::Repository.new

            if t.positive_test?
begin reasoner.execute(logger: t.logger, think: !!t.options['think']) - if t.options["filter"] + if t.options["conclusions"] repo << reasoner.conclusions elsif t.options["data"] repo << reasoner.data + elsif t.options["strings"] + t.logger.info "result:\n#{reasoner.strings}" + expect(reasoner.strings).to produce(t.expected, t) + next else repo << reasoner end rescue Exception => e expect(e.message).to produce("Not exception #{e.inspect}: #{e.backtrace.join("\n")}", t) end + + t.logger.info "result:\n#{repo.dump(:n3)}" if t.evaluate? || t.reason? - output_repo = RDF::Repository.load(t.result, format: :n3, base_uri: t.base) - expect(repo).to be_equivalent_graph(output_repo, t) + result_repo = RDF:: Repository.load(t.result, format: :n3) + + # Check against expanded triples from repo + expanded_repo = RDF::Repository.new do |r| + repo.each_expanded_statement do |st| + r << st + end + end + expect(expanded_repo).to be_equivalent_graph(result_repo, t) else end else expect { - graph << reader - expect(graph.dump(:nquads)).to produce("not this", t) + repo << reader + expect(repo.dump(:nquads)).to produce("not this", t) }.to raise_error(RDF::ReaderError) end end diff --git a/spec/suite_turtle_spec.rb b/spec/suite_turtle_spec.rb new file mode 100644 index 0000000..c85431b --- /dev/null +++ b/spec/suite_turtle_spec.rb @@ -0,0 +1,50 @@ +$:.unshift "." +require 'spec_helper' + +describe RDF::N3::Reader do + # W3C Turtle Test suite from http://w3c.github.io/rdf-tests/turtle/manifest.ttl + describe "w3c turtle tests" do + require 'suite_helper' + + Fixtures::SuiteTest::Manifest.open("https://w3c.github.io/N3/tests/TurtleTests/manifest.ttl") do |m| + describe m.comment do + m.entries.each do |t| + next if t.approval == 'rdft:Rejected' + specify "#{t.name}: #{t.comment}" do + t.logger = RDF::Spec.logger + t.logger.info t.inspect + t.logger.info "source:\n#{t.input}" + + reader = RDF::N3::Reader.new(t.input, + base_uri: t.base, + canonicalize: false, + validate: true, + logger: t.logger) + + graph = [].extend(RDF::Enumerable, RDF::Queryable) + + if t.positive_test? + begin + reader.each_statement {|st| graph << st} + rescue Exception => e + expect(e.message).to produce("Not exception #{e.inspect}", t) + end + + if t.evaluate? 
+ output_graph = RDF::Repository.load(t.result, format: :ntriples, base_uri: t.base) + expect(graph).to be_equivalent_graph(output_graph, t) + else + expect(graph).to be_a(RDF::Enumerable) + end + else + expect { + reader.each_statement {|st| graph << st} + expect(graph.dump(:ntriples)).to produce("not this", t) + }.to raise_error(RDF::ReaderError) + end + end + end + end + end + end +end unless ENV['CI'] \ No newline at end of file diff --git a/spec/swap b/spec/swap deleted file mode 120000 index 3978bda..0000000 --- a/spec/swap +++ /dev/null @@ -1 +0,0 @@ -../../swap/ \ No newline at end of file diff --git a/spec/w3c-n3 b/spec/w3c-n3 deleted file mode 120000 index 149ed6a..0000000 --- a/spec/w3c-n3 +++ /dev/null @@ -1 +0,0 @@ -../../w3c-n3 \ No newline at end of file diff --git a/spec/writer_spec.rb b/spec/writer_spec.rb index e4555ef..7113897 100644 --- a/spec/writer_spec.rb +++ b/spec/writer_spec.rb @@ -7,6 +7,12 @@ describe RDF::N3::Writer do let(:logger) {RDF::Spec.logger} + #after(:each) do |example| + # puts logger.to_s if + # example.exception && + # !example.exception.is_a?(RSpec::Expectations::ExpectationNotMetError) + #end + it_behaves_like 'an RDF::Writer' do let(:writer) {RDF::N3::Writer.new(StringIO.new)} end @@ -38,7 +44,7 @@ regexp: [ %r(^@base \.$), %r(^ \.$)], base_uri: "http://a/" }, - "qname URIs with prefix" => { + "pname URIs with prefix" => { input: %( .), regexp: [ %r(^@prefix ex: \.$), @@ -46,7 +52,7 @@ ], prefixes: {ex: "http://example.com/"} }, - "qname URIs with empty prefix" => { + "pname URIs with empty prefix" => { input: %( .), regexp: [ %r(^@prefix : \.$), @@ -55,7 +61,7 @@ prefixes: {"" => "http://example.com/"} }, # see example-files/arnau-registered-vocab.rb - "qname URIs with empty suffix" => { + "pname URIs with empty suffix" => { input: %( .), regexp: [ %r(^@prefix foaf: \.$), @@ -110,10 +116,6 @@ input: %(@prefix ex: . ex:a ex:b [ex:c ex:d] .), regexp: [%r(^ex:a ex:b \[ex:c ex:d\] \.$)], }, - "reuses BNode labels by default" => { - input: %(@prefix ex: . _:a ex:b _:a .), - regexp: [%r(^\s*_:a(_\d+_\d+) ex:b _:a\1 \.$)] - }, "standard prefixes" => { input: %( a ; @@ -127,15 +129,15 @@ ], standard_prefixes: true, prefixes: {} }, - "should not use qname with illegal local part" => { + "should not use pname with illegal local part" => { input: %( @prefix db: . @prefix dbo: . - db:Michael_Jackson dbo:artistOf . + db:Michael_Jackson dbo:artistOf . ), regexp: [ %r(^@prefix db: \.$), - %r(^db:Michael_Jackson dbo:artistOf \.$) + %r(^db:Michael_Jackson dbo:artistOf \.$) ], prefixes: { "db" => RDF::URI("http://dbpedia.org/resource/"), @@ -148,41 +150,49 @@ end end - describe "lists" do + describe "collections" do { "bare list": { input: %(@prefix ex: . (ex:a ex:b) .), - regexp: [%r(^\(\s*ex:a ex:b\s*\) \.$)] + regexp: [%r(^\(\s*ex:a ex:b\s*\) \.$)], + list_terms: true }, "literal list": { input: %(@prefix ex: . ex:a ex:b ( "apple" "banana" ) .), - regexp: [%r(^ex:a ex:b \(\s*"apple" "banana"\s*\) \.$)] + regexp: [%r(^ex:a ex:b \(\s*"apple" "banana"\s*\) \.$)], + list_terms: true }, "empty list": { input: %(@prefix ex: . ex:a ex:b () .), regexp: [%r(^ex:a ex:b \(\s*\) \.$)], - prefixes: { "" => RDF::Vocab::FOAF} + prefixes: { "" => RDF::Vocab::FOAF}, + list_terms: true }, "should generate empty list(2)" => { input: %(@prefix : . 
         :emptyList = () .),
        regexp: [%r(^:emptyList (<.*sameAs>|owl:sameAs|=) \(\) \.$)],
-        prefixes: { "" => "http://xmlns.com/foaf/0.1/"}
+        prefixes: { "" => "http://xmlns.com/foaf/0.1/"},
+        list_terms: true
      },
      "empty list as subject": {
        input: %(@prefix ex: . () ex:a ex:b .),
-        regexp: [%r(^\(\s*\) ex:a ex:b \.$)]
+        regexp: [%r(^\(\s*\) ex:a ex:b \.$)],
+        list_terms: true
      },
      "list as subject": {
        input: %(@prefix ex: . (ex:a) ex:b ex:c .),
-        regexp: [%r(^\(\s*ex:a\s*\) ex:b ex:c \.$)]
+        regexp: [%r(^\(\s*ex:a\s*\) ex:b ex:c \.$)],
+        list_terms: true
      },
      "list of empties": {
        input: %(@prefix ex: . [ex:listOf2Empties (() ())] .),
-        regexp: [%r(\[\s*ex:listOf2Empties \(\s*\(\s*\) \(\s*\)\s*\)\s*\] \.$)]
+        regexp: [%r(\[\s*ex:listOf2Empties \(\s*\(\s*\) \(\s*\)\s*\)\s*\] \.$)],
+        list_terms: true
      },
      "list anon": {
        input: %(@prefix ex: . [ex:twoAnons ([a ex:mother] [a ex:father])] .),
-        regexp: [%r(\[\s*ex:twoAnons \(\s*\[\s*a ex:mother\s*\] \[\s*a ex:father\s*\]\)\] \.$)]
+        regexp: [%r(\[\s*ex:twoAnons \(\s*\[\s*a ex:mother\s*\] \[\s*a ex:father\s*\]\)\] \.$)],
+        list_terms: true
      },
      "list subjects": {
        input: %(@prefix ex: . (ex:a ex:b) . ex:a a ex:Thing . ex:b a ex:Thing .),
@@ -190,7 +200,15 @@
          %r(\(ex:a ex:b\) \.),
          %r(ex:a a ex:Thing \.),
          %r(ex:b a ex:Thing \.),
-        ]
+        ],
+        list_terms: true
+      },
+      "embedded list": {
+        input: %{((:q)) a :Thing .},
+        regexp: [
+          %r{\(\(:q\)\) a :Thing \.}
+        ],
+        list_terms: true
      },
      "owl:unionOf list": {
        input: %(
@@ -250,24 +268,24 @@
        ],
        standard_prefixes: true
      },
-      "list pattern with extra properties": {
-        input: %(
-          _:a .
-          _:a "a" .
-          _:a _:b .
-          _:b "b" .
-          _:a "This list node has also properties other than rdf:first and rdf:rest" .
-          _:b _:c .
-          _:c "c" .
-          _:c .
-        ),
-        regexp: [%r( \[),
-          %r( "This list node has also properties other than rdf:first and rdf:rest";),
-          %r(rdf:first "a";),
-          %r(rdf:rest \(\s*"b" "c"\s*\)),
-        ],
-        standard_prefixes: true
-      },
+      #"list pattern with extra properties": {
+      #  input: %(
+      #    _:a .
+      #    _:a "a" .
+      #    _:a _:b .
+      #    _:b "b" .
+      #    _:a "This list node has also properties other than rdf:first and rdf:rest" .
+      #    _:b _:c .
+      #    _:c "c" .
+      #    _:c .
+      #  ),
+      #  regexp: [%r( \[),
+      #    %r( "This list node has also properties other than rdf:first and rdf:rest";),
+      #    %r(rdf:first "a";),
+      #    %r(rdf:rest \(\s*"b" "c"\s*\)),
+      #  ],
+      #  standard_prefixes: true
+      #},
      "list with empty list": {
        input: %(
          _:l1 .
@@ -281,21 +299,35 @@
      },
      "list with multiple lists": {
        input: %(
-           _:l1 .
-           _:a "a" .
-           _:a .
-           _:b "b" .
-           _:b .
-           _:l1 _:a .
-           _:l1 _:l2 .
-           _:l2 _:b .
-           _:l2 .
+          _:l1 .
+          _:a "a" .
+          _:a .
+          _:b "b" .
+          _:b .
+          _:l1 _:a .
+          _:l1 _:l2 .
+          _:l2 _:b .
+          _:l2 .
        ),
        regexp: [
          %r( \(\s*\(\s*"a"\) \(\s*"b"\)\) .)
        ],
        standard_prefixes: true
      },
+      "list with formula": {
+        input: %(
+          @prefix log: .
+          {
+            ({:sky :color :blue} {:sky :color :green}) log:conjunction ?F
+          } => { ?F a :result} .
+        ),
+        regexp: [
+          %r[{\s+\(\s*{\s*:sky :color :blue \.\s+}\s+{]m,
+          %r[{\s+:sky :color :green \.\s+}\s*\)]m,
+          %r[}\)\s+log:conjunction\s+\?F\s+\.\s+} =>]m,
+          %r[=>\s+{\s+\?F a :result \.\s*}]m
+        ]
+      },
    }.each do |name, params|
      it name do
        serialize(params[:input], params[:regexp], **params)
@@ -440,13 +472,13 @@
  describe "xsd:double" do
    [
-      [%q("1.0e1"^^xsd:double), /1.0e1 ./],
-      [%q(1.0e1), /1.0e1 ./],
-      [%q("0.1e1"^^xsd:double), /1.0e0 ./],
-      [%q(0.1e1), /1.0e0 ./],
-      [%q("10.02e1"^^xsd:double), /1.002e2 ./],
-      [%q(10.02e1), /1.002e2 ./],
-      [%q("14"^^xsd:double), /1.4e1 ./],
+      [%q("1.0e1"^^xsd:double), /1.0E1 ./],
+      [%q(1.0e1), /1.0E1 ./],
+      [%q("0.1e1"^^xsd:double), /1.0E0 ./],
+      [%q(0.1e1), /1.0E0 ./],
+      [%q("10.02e1"^^xsd:double), /1.002E2 ./],
+      [%q(10.02e1), /1.002E2 ./],
+      [%q("14"^^xsd:double), /1.4E1 ./],
    ].each do |(l,r)|
      it "uses token for #{l.inspect}" do
        ttl = %(@prefix xsd: . #{l} .)
@@ -458,15 +490,15 @@
    end
 
    [
-      [0, "0.0e0"],
-      [10, "1.0e1"],
-      [-1, "-1.0e0"],
-      ["0", "0.0e0"],
-      ["10", "1.0e1"],
-      ["-1", "-1.0e0"],
-      ["1.0", "1.0e0"],
-      ["0.1", "1.0e-1"],
-      ["10.01", "1.001e1"],
+      [0, "0.0E0"],
+      [10, "1.0E1"],
+      [-1, "-1.0E0"],
+      ["0", "0.0E0"],
+      ["10", "1.0E1"],
+      ["-1", "-1.0E0"],
+      ["1.0", "1.0E0"],
+      ["0.1", "1.0E-1"],
+      ["10.01", "1.001E1"],
      ["true", %{"true"^^}],
      ["false", %{"false"^^}],
      ["string", %{"string"^^}],
@@ -483,13 +515,13 @@
      "empty subject" => {
        input: %({} .),
        regexp: [
-          %r(\[ \] \.)
+          %r({} \.)
        ]
      },
      "empty object" => {
        input: %( {} .),
        regexp: [
-          %r( \[\] \.)
+          %r( {} \.)
        ]
      },
      "as subject with constant content" => {
@@ -507,7 +539,7 @@
      "implies" => {
        input: %({ _:x :is _:x } => {_:x :is _:x } .),
        regexp: [
-          %r({\s+_:x(_\d+_\d+) :is _:x\1 \.\s+} => {\s+_:x(_\d+_\d+) :is _:x\2 \.\s+} \.)m
+          %r({\s+_:b0 :is _:b0 \.\s+} => {\s+_:b1 :is _:b1 \.\s+} \.)m
        ]
      },
      "formula simple" => {
@@ -561,7 +593,19 @@
          %r(} \.),
        ],
        input_format: :trig
-      }
+      },
+      "implication" => {
+        input: %q(
+          @prefix xsd: .
+          ("17"^^xsd:integer) a <#TestCase> .
+          { ( ?x ) a :TestCase} => { ?x a :RESULT } .
+        ),
+        regexp: [
+          %r{\(17\) a :TestCase \.},
+          %r{\(?x\) a :TestCase \.},
+          %r{\?x a :RESULT \.},
+        ]
+      },
    }.each do |name, params|
      it name do
        serialize(params[:input], params[:regexp], **params)
@@ -574,22 +618,21 @@
      "@forAll": {
        input: %(@forAll :o. :s :p :o .),
        regexp: [
-          %r(@forAll :o_\d+_\d+ \.),
-          %r(:s :p :o_\d+_\d+ \.),
+          %r(@forAll :o \.),
+          %r(:s :p :o \.),
        ]
      },
      "@forSome": {
        input: %(@forSome :o. :s :p :o .),
        regexp: [
-          %r(@forSome :o_\d+_\d+ \.),
-          %r(:s :p :o_\d+_\d+ \.),
+          %r(@forSome :o \.),
+          %r(:s :p :o \.),
        ]
      },
      "?o": {
        input: %(:s :p ?o .),
        regexp: [
-          %r(@forAll :o_\d+_\d+ \.),
-          %r(:s :p :o_\d+_\d+ \.),
+          %r(:s :p \?o \.),
        ]
      },
    }.each do |name, params|
@@ -599,49 +642,86 @@
      end
    end
 
+  describe "results" do
+    {
+      "r1": {
+        input: %(
+          ( "one" "two" ) a :whatever.
+          "one" a :SUCCESS.
+          "two" a :SUCCESS.
+        ),
+        regexp: [
+          %r(\(\s*"one"\s+"two"\s*\) a :whatever\s*\.),
+          %r("one" a :SUCCESS \.),
+          %r("two" a :SUCCESS \.),
+        ]
+      },
+    }.each do |name, params|
+      it name do
+        serialize(params[:input], params[:regexp], list_terms: true, **params)
+      end
+    end
+  end
+
  # W3C TriG Test suite
  describe "w3c n3 parser tests" do
    require_relative 'suite_helper'
-    Fixtures::SuiteTest::Manifest.open("https://w3c.github.io/n3/tests/manifest-parser.n3") do |m|
+    Fixtures::SuiteTest::Manifest.open("https://w3c.github.io/N3/tests/N3Tests/manifest.ttl") do |m|
      describe m.comment do
        m.entries.each do |t|
-          next unless t.positive_test? && t.evaluate?
+          next if t.negative_test? || t.rejected?
          specify "#{t.name}: #{t.comment} (action)" do
            case t.name
-            when *%w(n3_10003 n3_10004 n3_10008)
-              skip "Blank Node predicates"
-            when *%w(n3_10012 n3_10017)
+            when *%w(cwm_syntax_path2.n3
+                     cwm_includes_quantifiers_limited.n3
+                     cwm_includes_builtins.n3
+                     cwm_list_unify5-ref.n3
+                     cwm_other_lists-simple.n3 cwm_syntax_qual-after-user.n3
+                     cwm_other_lists.n3 # empty list with extra properties
+                     new_syntax_inverted_properties.n3
+                     cwm_other_dec-div.n3 cwm_syntax_sep-term.n3
+                     )
              pending "Investigate"
-            when *%w(n3_10013)
+            when *%w(cwm_math_trigo.ref.n3
+                     cwm_syntax_decimal.n3 cwm_syntax_decimal-ref.n3)
              pending "Number syntax"
+            when *%w(cwm_syntax_bad-preds-formula.n3
+                     cwm_syntax_path2.n3)
+              pending "Anonymous properties"
+            when *%w(cwm_syntax_too-nested.n3 cwm_list_unify4.n3)
+              skip("stack overflow")
+            when *%w(manifest.ttl cwm_math_math-test.n3 cwm_other_daml-ex.n3
+                     cwm_math_math-test.n3 cwm_syntax_this-quantifiers-ref.n3)
+              skip("too long")
            end
+            logger.info t.inspect
            logger.info "source: #{t.input}"
-            repo = parse(t.input, base_uri: t.base)
-            n3 = serialize(repo, [], base_uri: t.base, standard_prefixes: true)
-            logger.info "serialized: #{n3}"
-            g2 = parse(n3, base_uri: t.base)
-            expect(g2).to be_equivalent_graph(repo, logger: logger)
+            repo = parse(t.input, base_uri: t.base, logger: false)
+            logger.info("sxp: #{SXP::Generator.string(repo.to_sxp_bin)}")
+            n3 = serialize(repo, [], base_uri: t.base, standard_prefixes: true, logger: logger)
+            g2 = parse(n3, validate: true, base_uri: t.base, logger: logger)
+            expect(g2.count).to produce(repo.count, logger)
+            expect(g2.isomorphic?(repo)).to produce(true, logger)
          end
+          next if t.syntax? || t.reason?
          specify "#{t.name}: #{t.comment} (result)" do
            case t.name
-            when *%w(n3_10003 n3_10004 n3_10008)
-              skip "Blank Node predicates"
-            when *%w(n3_10012 n3_10017)
+            when *%w(cwm_syntax_path2.n3)
              pending "Investigate"
-            when *%w(n3_10013)
-              pending "Number syntax"
+            when *%w(cwm_syntax_too-nested.n3)
+              skip("stack overflow")
            end
+            logger.info t.inspect
            logger.info "source: #{t.expected}"
            format = detect_format(t.expected)
-            repo = parse(t.expected, base_uri: t.base, format: format)
-            n3 = serialize(repo, [], base_uri: t.base, standard_prefixes: true)
-            logger.info "serialized: #{n3}"
-            g2 = parse(n3, base_uri: t.base)
-            expect(g2).to be_equivalent_graph(repo, logger: logger)
+            repo = parse(t.expected, base_uri: t.base, format: format, logger: false)
+            n3 = serialize(repo, [], base_uri: t.base, standard_prefixes: true, logger: logger)
+            g2 = parse(n3, validate: true, base_uri: t.base, logger: false)
+            expect(g2.isomorphic?(repo)).to produce(true, logger)
          end
        end
      end
    end
@@ -649,18 +729,18 @@
  end unless ENV['CI']
 
  def parse(input, format: :n3, **options)
-    repo = RDF::Repository.new
+    repo = [].extend(RDF::Enumerable, RDF::Queryable)
    reader = RDF::Reader.for(format)
-    repo << reader.new(input, **options)
+    reader.new(input, **options).each_statement {|st| repo << st}
    repo
  end
 
  # Serialize ntstr to a string and compare against regexps
  def serialize(ntstr, regexps = [], base_uri: nil, **options)
    prefixes = options[:prefixes] || {}
-    g = ntstr.is_a?(RDF::Enumerable) ? ntstr : parse(ntstr, base_uri: base_uri, prefixes: prefixes, validate: false, logger: [], format: options.fetch(:input_format, :n3))
+    g = ntstr.is_a?(RDF::Enumerable) ? ntstr : parse(ntstr, base_uri: base_uri, prefixes: prefixes, validate: false, logger: false, format: options.fetch(:input_format, :n3))
    result = RDF::N3::Writer.buffer(**options.merge(logger: logger, base_uri: base_uri, prefixes: prefixes)) do |writer|
-      writer << g
+      g.each_statement {|st| writer << st}
    end
    if $verbose
      require 'cgi'