diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0589f05..a5767f0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -19,16 +19,10 @@ jobs: strategy: fail-fast: false matrix: - ruby: - - 2.6 - - 2.7 - - "3.0" - - 3.1 - - ruby-head - - jruby + ruby: [2.6, 2.7, '3.0', 3.1, 3.2, ruby-head, jruby] steps: - name: Clone repository - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Set up Ruby uses: ruby/setup-ruby@v1 with: @@ -48,15 +42,14 @@ jobs: runs-on: windows-latest env: CI: true - ALLOW_FAILURES: ${{ endsWith(matrix.ruby, 'head') || matrix.ruby == 'jruby' }} + ALLOW_FAILURES: ${{ matrix.ruby == '3.2' || matrix.ruby == 'jruby' }} strategy: fail-fast: false matrix: - ruby: - - 3.1 + ruby: [3.1, 3.2] steps: - name: Clone repository - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Set up Ruby uses: ruby/setup-ruby@v1 with: diff --git a/.github/workflows/generate-docs.yml b/.github/workflows/generate-docs.yml index b8d16ed..65aea93 100644 --- a/.github/workflows/generate-docs.yml +++ b/.github/workflows/generate-docs.yml @@ -10,7 +10,7 @@ jobs: name: Update gh-pages with docs steps: - name: Clone repository - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Set up Ruby uses: ruby/setup-ruby@v1 with: diff --git a/VERSION b/VERSION index b347b11..351227f 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -3.2.3 +3.2.4 diff --git a/json-ld.gemspec b/json-ld.gemspec index 37d9eed..2e92c46 100755 --- a/json-ld.gemspec +++ b/json-ld.gemspec @@ -32,24 +32,24 @@ Gem::Specification.new do |gem| gem.required_ruby_version = '>= 2.6' gem.requirements = [] - gem.add_runtime_dependency 'rdf', '~> 3.2', '>= 3.2.9' + gem.add_runtime_dependency 'rdf', '~> 3.2', '>= 3.2.10' gem.add_runtime_dependency 'multi_json', '~> 1.15' gem.add_runtime_dependency 'link_header', '~> 0.0', '>= 0.0.8' gem.add_runtime_dependency 'json-canonicalization', '~> 0.3' - gem.add_runtime_dependency 'htmlentities', '~> 4.3' - gem.add_runtime_dependency 'rack', '~> 2.2' + gem.add_runtime_dependency 'htmlentities', '~> 4.3' + gem.add_runtime_dependency "rack", '>= 2.2', '< 4' gem.add_development_dependency 'sinatra-linkeddata','~> 3.2' - gem.add_development_dependency 'jsonlint', '~> 0.3' unless is_java - gem.add_development_dependency 'oj', '~> 3.13' unless is_java + gem.add_development_dependency 'jsonlint', '~> 0.4' unless is_java + gem.add_development_dependency 'oj', '~> 3.14' unless is_java gem.add_development_dependency 'yajl-ruby', '~> 1.4' unless is_java - gem.add_development_dependency 'rack-test', '~> 1.1' + gem.add_development_dependency 'rack-test', '>= 1.1', '< 3' gem.add_development_dependency 'rdf-isomorphic', '~> 3.2' gem.add_development_dependency 'rdf-spec', '~> 3.2' gem.add_development_dependency 'rdf-trig', '~> 3.2' gem.add_development_dependency 'rdf-turtle', '~> 3.2' gem.add_development_dependency 'rdf-vocab', '~> 3.2' gem.add_development_dependency 'rdf-xsd', '~> 3.2' - gem.add_development_dependency 'rspec', '~> 3.11' + gem.add_development_dependency 'rspec', '~> 3.12' gem.add_development_dependency 'rspec-its', '~> 1.3' gem.add_development_dependency 'yard' , '~> 0.9' diff --git a/lib/json/ld/api.rb b/lib/json/ld/api.rb index 274b07b..9a8e869 100644 --- a/lib/json/ld/api.rb +++ b/lib/json/ld/api.rb @@ -66,6 +66,7 @@ class API # @param [String, #read, Hash, Array, JSON::LD::Context] context # An external context to use additionally to the context embedded in input when expanding the input. 
# @param [Hash{Symbol => Object}] options + # @option options [Symbol] :adapter used with MultiJson # @option options [RDF::URI, String, #to_s] :base # The Base IRI to use when expanding the document. This overrides the value of `input` if it is a _IRI_. If not specified and `input` is not an _IRI_, the base IRI defaults to the current document IRI if in a browser context, or the empty string if there is no document context. If not specified, and a base IRI is found from `input`, options[:base] will be modified with this value. # @option options [Boolean] :compactArrays (true) @@ -74,10 +75,10 @@ class API # Creates document relative IRIs when compacting, if `true`, otherwise leaves expanded. # @option options [Proc] :documentLoader # The callback of the loader to be used to retrieve remote documents and contexts. If specified, it must be used to retrieve remote documents and contexts; otherwise, if not specified, the processor's built-in loader must be used. See {documentLoader} for the method signature. - # @option options [Boolean] :lowercaseLanguage - # By default, language tags are left as is. To normalize to lowercase, set this option to `true`. # @option options [String, #read, Hash, Array, JSON::LD::Context] :expandContext # A context that is used to initialize the active context when expanding a document. + # @option options [Boolean] :extendedRepresentation (false) + # Use the extended internal representation. # @option options [Boolean] :extractAllScripts # If set, when given an HTML input without a fragment identifier, extracts all `script` elements with type `application/ld+json` into an array during expansion. # @option options [Boolean, String, RDF::URI] :flatten @@ -86,19 +87,19 @@ class API # When set, this has the effect of inserting a context definition with `@language` set to the associated value, creating a default language for interpreting string values. # @option options [Symbol] :library # One of :nokogiri or :rexml. If nil/unspecified uses :nokogiri if available, :rexml otherwise. + # @option options [Boolean] :lowercaseLanguage + # By default, language tags are left as is. To normalize to lowercase, set this option to `true`. + # @option options [Boolean] :ordered (true) + # Order traversal of dictionary members by key when performing algorithms. # @option options [String] :processingMode # Processing mode, json-ld-1.0 or json-ld-1.1. - # If `processingMode` is not specified, a mode of `json-ld-1.0` or `json-ld-1.1` is set, the context used for `expansion` or `compaction`. - # @option options [Boolean] rdfstar (false) + # @option options [Boolean] :rdfstar (false) # support parsing JSON-LD-star statement resources. # @option options [Boolean] :rename_bnodes (true) # Rename bnodes as part of expansion, or keep them the same. # @option options [Boolean] :unique_bnodes (false) # Use unique bnode identifiers, defaults to using the identifier which the node was originally initialized with (if any). - # @option options [Symbol] :adapter used with MultiJson # @option options [Boolean] :validate Validate input, if a string or readable object. - # @option options [Boolean] :ordered (true) - # Order traversal of dictionary members by key when performing algorithms. 
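For illustration, a minimal sketch of expanding with the options documented above (the context, IRI, and values are assumptions, not part of this patch):

  require 'json/ld'
  input = JSON.parse %({
    "@context": {"label": {"@id": "http://example.org/label", "@language": "EN"}},
    "label": "a value"
  })
  # :ordered fixes traversal order; :lowercaseLanguage normalizes the tag to "en".
  JSON::LD::API.expand(input, ordered: true, lowercaseLanguage: true)
  # => [{"http://example.org/label" => [{"@value" => "a value", "@language" => "en"}]}]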
# @yield [api] # @yieldparam [API] # @raise [JsonLdError] @@ -235,7 +236,7 @@ def self.compact(input, context, expanded: false, serializer: nil, **options) end API.new(expanded_input, context, no_default_base: true, **options) do - log_debug(".compact") {"expanded input: #{expanded_input.to_json(JSON_STATE) rescue 'malformed json'}"} + # log_debug(".compact") {"expanded input: #{expanded_input.to_json(JSON_STATE) rescue 'malformed json'}"} result = compact(value) # xxx) Add the given context to the output @@ -289,7 +290,7 @@ def self.flatten(input, context, expanded: false, serializer: nil, **options) # Initialize input using API.new(expanded_input, context, no_default_base: true, **options) do - log_debug(".flatten") {"expanded input: #{value.to_json(JSON_STATE) rescue 'malformed json'}"} + # log_debug(".flatten") {"expanded input: #{value.to_json(JSON_STATE) rescue 'malformed json'}"} # Rename blank nodes recursively. Note that this does not create new blank node identifiers where none exist, which is performed in the node map generation algorithm. @value = rename_bnodes(@value) if @options[:rename_bnodes] @@ -410,8 +411,8 @@ def self.frame(input, frame, expanded: false, serializer: nil, **options) # Initialize input using frame as context API.new(expanded_input, frame['@context'], no_default_base: true, **options) do - log_debug(".frame") {"expanded input: #{expanded_input.to_json(JSON_STATE) rescue 'malformed json'}"} - log_debug(".frame") {"expanded frame: #{expanded_frame.to_json(JSON_STATE) rescue 'malformed json'}"} + # log_debug(".frame") {"expanded input: #{expanded_input.to_json(JSON_STATE) rescue 'malformed json'}"} + # log_debug(".frame") {"expanded frame: #{expanded_frame.to_json(JSON_STATE) rescue 'malformed json'}"} if %w(@first @last).include?(options[:embed]) && context.processingMode('json-ld-1.1') raise JSON::LD::JsonLdError::InvalidEmbedValue, "#{options[:embed]} is not a valid value of @embed in 1.1 mode" if @options[:validate] @@ -458,7 +459,7 @@ def self.frame(input, frame, expanded: false, serializer: nil, **options) # Replace values with `@preserve` with the content of its entry. result = cleanup_preserve(result) - log_debug(".frame") {"expanded result: #{result.to_json(JSON_STATE) rescue 'malformed json'}"} + # log_debug(".frame") {"expanded result: #{result.to_json(JSON_STATE) rescue 'malformed json'}"} # Compact result compacted = compact(result) @@ -477,7 +478,7 @@ def self.frame(input, frame, expanded: false, serializer: nil, **options) # Only add context if one was provided result = context.serialize(provided_context: frame).merge(result) if frame['@context'] - log_debug(".frame") {"after compact: #{result.to_json(JSON_STATE) rescue 'malformed json'}"} + # log_debug(".frame") {"after compact: #{result.to_json(JSON_STATE) rescue 'malformed json'}"} result end @@ -518,7 +519,7 @@ def self.toRdf(input, expanded: false, **options, &block) API.new(flattened_input, nil, **options) do # 1) Perform the Expansion Algorithm on the JSON-LD input. # This removes any existing context to allow the given context to be cleanly applied. - log_debug(".toRdf") {"flattened input: #{flattened_input.to_json(JSON_STATE) rescue 'malformed json'}"} + # log_debug(".toRdf") {"flattened input: #{flattened_input.to_json(JSON_STATE) rescue 'malformed json'}"} # Recurse through input flattened_input.each do |node| @@ -527,7 +528,7 @@ def self.toRdf(input, expanded: false, **options, &block) # Drop invalid statements (other than IRIs) unless statement.valid_extended? 
- log_debug(".toRdf") {"drop invalid statement: #{statement.to_nquads}"} + # log_debug(".toRdf") {"drop invalid statement: #{statement.to_nquads}"} next end @@ -562,6 +563,7 @@ def self.fromRdf(input, useRdfType: false, useNativeTypes: false, serializer: ni API.new(nil, nil, **options) do result = from_statements(input, + extendedRepresentation: options[:extendedRepresentation], useRdfType: useRdfType, useNativeTypes: useNativeTypes) end @@ -574,6 +576,9 @@ def self.fromRdf(input, useRdfType: false, useNativeTypes: false, serializer: ni # Uses built-in or provided documentLoader to retrieve a parsed document. # # @param [RDF::URI, String] url + # @param [Regexp] allowed_content_types + # A regular expression matching other content types allowed + # beyond types for JSON and HTML. # @param [String, RDF::URI] base # Location to use as documentUrl instead of `url`. # @option options [Proc] :documentLoader @@ -594,6 +599,7 @@ def self.fromRdf(input, useRdfType: false, useNativeTypes: false, serializer: ni # If a block is given, the result of evaluating the block is returned, otherwise, the retrieved remote document and context information unless block given # @raise [JsonLdError] def self.loadRemoteDocument(url, + allowed_content_types: nil, base: nil, documentLoader: nil, extractAllScripts: false, @@ -674,7 +680,8 @@ def self.loadRemoteDocument(url, if remote_doc.contentType && validate raise IOError, "url: #{url}, contentType: #{remote_doc.contentType}" unless - remote_doc.contentType.match?(/application\/(.+\+)?json|text\/html|application\/xhtml\+xml/) + remote_doc.contentType.match?(/application\/(.+\+)?json|text\/html|application\/xhtml\+xml/) || + (allowed_content_types && remote_doc.contentType.match?(allowed_content_types)) end block_given? ? yield(remote_doc) : remote_doc end diff --git a/lib/json/ld/compact.rb b/lib/json/ld/compact.rb index e5fb177..0ec5551 100644 --- a/lib/json/ld/compact.rb +++ b/lib/json/ld/compact.rb @@ -21,14 +21,14 @@ def compact(element, base: nil, property: nil, log_depth: nil) - log_debug("compact", depth: log_depth.to_i) {"element: #{element.inspect}, ec: #{context.inspect}"} + # log_debug("compact", depth: log_depth.to_i) {"element: #{element.inspect}, ec: #{context.inspect}"} # If the term definition for active property itself contains a context, use that for compacting values. 
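A sketch of the allowed_content_types escape hatch added in the guard above; the URL and pattern are assumptions, and validate is the existing option the guard checks:

  remote = JSON::LD::API.loadRemoteDocument(
    "https://example.org/dataset.jsonld",
    allowed_content_types: %r{\Aapplication/(trig|n-quads)\z},
    validate: true)
  remote.contentType  # a matching non-JSON/HTML content type no longer raises IOError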
input_context = self.context case element when Array - #log_debug("") {"Array #{element.inspect}"} + # log_debug("") {"Array #{element.inspect}"} result = element.map do |item| compact(item, base: base, property: property, log_depth: log_depth.to_i + 1) end.compact @@ -38,10 +38,10 @@ def compact(element, # member; otherwise the compacted value is element if result.length == 1 && !context.as_array?(property) && @options[:compactArrays] - log_debug("=> extract single element", depth: log_depth.to_i) {result.first.inspect} + # log_debug("=> extract single element", depth: log_depth.to_i) {result.first.inspect} result.first else - log_debug("=> array result", depth: log_depth.to_i) {result.inspect} + # log_debug("=> array result", depth: log_depth.to_i) {result.inspect} result end when Hash @@ -52,7 +52,7 @@ def compact(element, # Revert any previously type-scoped (non-preserved) context if context.previous_context && !element.key?('@value') && element.keys != %w(@id) - log_debug("revert ec", depth: log_depth.to_i) {"previous context: #{context.previous_context.inspect}"} + # log_debug("revert ec", depth: log_depth.to_i) {"previous context: #{context.previous_context.inspect}"} self.context = context.previous_context end @@ -61,13 +61,13 @@ def compact(element, if td && !td.context.nil? self.context = context.parse(td.context, override_protected: true) - log_debug("prop-scoped", depth: log_depth.to_i) {"context: #{self.context.inspect}"} + # log_debug("prop-scoped", depth: log_depth.to_i) {"context: #{self.context.inspect}"} end if (element.key?('@id') || element.key?('@value')) && !element.key?('@annotation') result = context.compact_value(property, element, base: @options[:base]) if !result.is_a?(Hash) || context.coerce(property) == '@json' - log_debug("", depth: log_depth.to_i) {"=> scalar result: #{result.inspect}"} + # log_debug("", depth: log_depth.to_i) {"=> scalar result: #{result.inspect}"} return result end end @@ -90,12 +90,12 @@ def compact(element, each do |term| term_context = input_context.term_definitions[term].context if input_context.term_definitions[term] self.context = context.parse(term_context, propagate: false) unless term_context.nil? - log_debug("type-scoped", depth: log_depth.to_i) {"context: #{self.context.inspect}"} + # log_debug("type-scoped", depth: log_depth.to_i) {"context: #{self.context.inspect}"} end element.keys.opt_sort(ordered: @options[:ordered]).each do |expanded_property| expanded_value = element[expanded_property] - log_debug("", depth: log_depth.to_i) {"#{expanded_property}: #{expanded_value.inspect}"} + # log_debug("", depth: log_depth.to_i) {"#{expanded_property}: #{expanded_value.inspect}"} if expanded_property == '@id' compacted_value = as_array(expanded_value).map do |expanded_id| @@ -134,7 +134,7 @@ def compact(element, compacted_value = compact(expanded_value, base: base, property: '@reverse', log_depth: log_depth.to_i + 1) - log_debug("@reverse", depth: log_depth.to_i) {"compacted_value: #{compacted_value.inspect}"} + # log_debug("@reverse", depth: log_depth.to_i) {"compacted_value: #{compacted_value.inspect}"} # handle double-reversed properties compacted_value.each do |prop, value| if context.reverse?(prop) @@ -146,7 +146,7 @@ def compact(element, unless compacted_value.empty? 
al = context.compact_iri('@reverse', vocab: true) - log_debug("", depth: log_depth.to_i) {"remainder: #{al} => #{compacted_value.inspect}"} + # log_debug("", depth: log_depth.to_i) {"remainder: #{al} => #{compacted_value.inspect}"} result[al] = compacted_value end next @@ -157,7 +157,7 @@ def compact(element, compacted_value = compact(expanded_value, base: base, property: property, log_depth: log_depth.to_i + 1) - log_debug("@preserve", depth: log_depth.to_i) {"compacted_value: #{compacted_value.inspect}"} + # log_debug("@preserve", depth: log_depth.to_i) {"compacted_value: #{compacted_value.inspect}"} unless compacted_value.is_a?(Array) && compacted_value.empty? result['@preserve'] = compacted_value @@ -166,14 +166,14 @@ def compact(element, end if expanded_property == '@index' && context.container(property).include?('@index') - log_debug("@index", depth: log_depth.to_i) {"drop @index"} + # log_debug("@index", depth: log_depth.to_i) {"drop @index"} next end # Otherwise, if expanded property is @direction, @index, @value, or @language: if EXPANDED_PROPERTY_DIRECTION_INDEX_LANGUAGE_VALUE.include?(expanded_property) al = context.compact_iri(expanded_property, vocab: true) - log_debug(expanded_property, depth: log_depth.to_i) {"#{al} => #{expanded_value.inspect}"} + # log_debug(expanded_property, depth: log_depth.to_i) {"#{al} => #{expanded_value.inspect}"} result[al] = expanded_value next end @@ -223,7 +223,7 @@ def compact(element, compacted_item = compact(value, base: base, property: item_active_property, log_depth: log_depth.to_i + 1) - log_debug("", depth: log_depth.to_i) {" => compacted key: #{item_active_property.inspect} for #{compacted_item.inspect}"} + # log_debug("", depth: log_depth.to_i) {" => compacted key: #{item_active_property.inspect} for #{compacted_item.inspect}"} # handle @list if list?(expanded_item) @@ -345,7 +345,7 @@ def compact(element, result else # For other types, the compacted value is the element value - log_debug("compact", depth: log_depth.to_i) {element.class.to_s} + # log_debug("compact", depth: log_depth.to_i) {element.class.to_s} element end diff --git a/lib/json/ld/context.rb b/lib/json/ld/context.rb index 1743785..868a14b 100644 --- a/lib/json/ld/context.rb +++ b/lib/json/ld/context.rb @@ -212,7 +212,7 @@ def initialize(**options) self.default_language = options[:language] if options[:language] =~ /^[a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})*$/ @term_definitions = options[:term_definitions] if options[:term_definitions] - #log_debug("init") {"iri_to_term: #{iri_to_term.inspect}"} + # log_debug("init") {"iri_to_term: #{iri_to_term.inspect}"} yield(self) if block_given? end @@ -267,13 +267,13 @@ def parse(local_context, "Attempt to clear a context with protected terms" end when Context - log_debug("parse") {"context: #{context.inspect}"} + # log_debug("parse") {"context: #{context.inspect}"} result = result.merge(context) when IO, StringIO - log_debug("parse") {"io: #{context}"} + # log_debug("parse") {"io: #{context}"} # Load context document, if it is an open file begin - ctx = JSON.load(context) + ctx = load_context(context, **@options) raise JSON::LD::JsonLdError::InvalidRemoteContext, "Context missing @context key" if @options[:validate] && ctx['@context'].nil? result = result.parse(ctx["@context"] ? 
ctx["@context"] : {}) rescue JSON::ParserError => e @@ -282,7 +282,7 @@ def parse(local_context, self end when String, RDF::URI - log_debug("parse") {"remote: #{context}, base: #{result.context_base || result.base}"} + # log_debug("parse") {"remote: #{context}, base: #{result.context_base || result.base}"} # 3.2.1) Set context to the result of resolving value against the base IRI which is established as specified in section 5.1 Establishing a Base URI of [RFC3986]. Only the basic algorithm in section 5.2 of [RFC3986] is used; neither Syntax-Based Normalization nor Scheme-Based Normalization are performed. Characters additionally allowed in IRI references are treated in the same way that unreserved characters are treated in URI references, per section 6.5 of [RFC3987]. context = RDF::URI(result.context_base || base).join(context) @@ -297,13 +297,14 @@ def parse(local_context, cached_context = if PRELOADED[context_canon.to_s] # If we have a cached context, merge it into the current context (result) and use as the new context - log_debug("parse") {"=> cached_context: #{context_canon.to_s.inspect}"} + # log_debug("parse") {"=> cached_context: #{context_canon.to_s.inspect}"} # If this is a Proc, then replace the entry with the result of running the Proc if PRELOADED[context_canon.to_s].respond_to?(:call) - log_debug("parse") {"=> (call)"} + # log_debug("parse") {"=> (call)"} PRELOADED[context_canon.to_s] = PRELOADED[context_canon.to_s].call end + PRELOADED[context_canon.to_s].context_base ||= context_canon.to_s PRELOADED[context_canon.to_s] else # Load context document, if it is a string @@ -478,7 +479,7 @@ def create_term_definition(local_context, term, defined, remote_contexts: [], validate_scoped: true) # Expand a string value, unless it matches a keyword - log_debug("create_term_definition") {"term = #{term.inspect}"} + # log_debug("create_term_definition") {"term = #{term.inspect}"} # If defined contains the key term, then the associated value must be true, indicating that the term definition has already been created, so return. Otherwise, a cyclical term definition has been detected, which is an error. case defined[term] @@ -523,7 +524,7 @@ def create_term_definition(local_context, term, defined, raise JsonLdError::InvalidTermDefinition, "Term definition for #{term.inspect} is an #{value.class} on term #{term.inspect}" unless value.is_a?(Hash) - #log_debug("") {"Hash[#{term.inspect}] = #{value.inspect}"} + # log_debug("") {"Hash[#{term.inspect}] = #{value.inspect}"} definition = TermDefinition.new(term) definition.simple = simple_term @@ -565,7 +566,7 @@ def create_term_definition(local_context, term, defined, elsif !JSON_LD_10_TYPE_VALUES.include?(type) && !(type.is_a?(RDF::URI) && type.absolute?) raise JsonLdError::InvalidTypeMapping, "unknown mapping for '@type': #{type.inspect} on term #{term.inspect}" end - #log_debug("") {"type_mapping: #{type.inspect}"} + # log_debug("") {"type_mapping: #{type.inspect}"} definition.type_mapping = type end @@ -649,7 +650,7 @@ def create_term_definition(local_context, term, defined, # Otherwise, term is an absolute IRI. Set the IRI mapping for definition to term term end - log_debug("") {"=> #{definition.id}"} + # log_debug("") {"=> #{definition.id}"} elsif term.include?('/') # If term is a relative IRI definition.id = expand_iri(term, vocab: true) @@ -662,13 +663,13 @@ def create_term_definition(local_context, term, defined, # Otherwise, active context must have a vocabulary mapping, otherwise an invalid value has been detected, which is an error. 
Set the IRI mapping for definition to the result of concatenating the value associated with the vocabulary mapping and term. raise JsonLdError::InvalidIRIMapping, "relative term definition without vocab: #{term} on term #{term.inspect}" unless vocab definition.id = vocab + term - log_debug("") {"=> #{definition.id}"} + # log_debug("") {"=> #{definition.id}"} end @iri_to_term[definition.id] = term if simple_term && definition.id if value.key?('@container') - #log_debug("") {"container_mapping: #{value['@container'].inspect}"} + # log_debug("") {"container_mapping: #{value['@container'].inspect}"} definition.container_mapping = check_container(value['@container'], local_context, defined, term) # If @container includes @type @@ -702,7 +703,7 @@ def create_term_definition(local_context, term, defined, when nil then [nil] else value['@context'] end - log_debug("") {"context: #{definition.context.inspect}"} + # log_debug("") {"context: #{definition.context.inspect}"} rescue JsonLdError => e raise JsonLdError::InvalidScopedContext, "Term definition for #{term.inspect} contains illegal value for @context: #{e.message}" end @@ -722,14 +723,14 @@ def create_term_definition(local_context, term, defined, else raise JsonLdError::InvalidLanguageMapping, "language must be null or a string, was #{value['@language'].inspect}} on term #{term.inspect}" end - #log_debug("") {"language_mapping: #{language.inspect}"} + # log_debug("") {"language_mapping: #{language.inspect}"} definition.language_mapping = language || false end if value.key?('@direction') direction = value['@direction'] raise JsonLdError::InvalidBaseDirection, "direction must be null, 'ltr', or 'rtl', was #{language.inspect}} on term #{term.inspect}" unless direction.nil? || %w(ltr rtl).include?(direction) - #log_debug("") {"direction_mapping: #{direction.inspect}"} + # log_debug("") {"direction_mapping: #{direction.inspect}"} definition.direction_mapping = direction || false end @@ -737,7 +738,7 @@ def create_term_definition(local_context, term, defined, nest = value['@nest'] raise JsonLdError::InvalidNestValue, "nest must be a string, was #{nest.inspect}} on term #{term.inspect}" unless nest.is_a?(String) raise JsonLdError::InvalidNestValue, "nest must not be a keyword other than @nest, was #{nest.inspect}} on term #{term.inspect}" if nest.match?(/^@[a-zA-Z]+$/) && nest != '@nest' - #log_debug("") {"nest: #{nest.inspect}"} + # log_debug("") {"nest: #{nest.inspect}"} definition.nest = nest end @@ -897,22 +898,21 @@ def propagate=(value, **options) # @param [Hash{Symbol => Object}] options ({}) # @return [Hash] def serialize(provided_context: nil, **options) - #log_debug("serlialize: generate context") - #log_debug("") {"=> context: #{inspect}"} + # log_debug("serlialize: generate context") + # log_debug("") {"=> context: #{inspect}"} use_context = case provided_context when String, RDF::URI - #log_debug "serlialize: reuse context: #{provided_context.inspect}" + # log_debug "serlialize: reuse context: #{provided_context.inspect}" provided_context.to_s when Hash - #log_debug "serlialize: reuse context: #{provided_context.inspect}" + # log_debug "serlialize: reuse context: #{provided_context.inspect}" # If it has an @context entry use it, otherwise it is assumed to be the body of a context provided_context.fetch('@context', provided_context) when Array - #log_debug "serlialize: reuse context: #{provided_context.inspect}" + # log_debug "serlialize: reuse context: #{provided_context.inspect}" provided_context when IO, StringIO - 
provided_context.rewind - JSON.load(provided_context).fetch('@context', {}) + load_context(provided_context, **@options).fetch('@context', {}) else ctx = {} ctx['@version'] = 1.1 if @processingMode == 'json-ld-1.1' @@ -1015,7 +1015,7 @@ def from_vocabulary(graph) # # @return [TermDefinition] def set_mapping(term, value) - #log_debug("") {"map #{term.inspect} to #{value.inspect}"} + # log_debug("") {"map #{term.inspect} to #{value.inspect}"} term = term.to_s term_definitions[term] = TermDefinition.new(term, id: value, simple: true, prefix: (value.to_s.end_with?(*PREFIX_URI_ENDINGS))) term_definitions[term].simple = true @@ -1467,8 +1467,6 @@ def compact_iri(iri, base: nil, reverse: false, value: nil, vocab: nil) end end - RDF_LITERAL_NATIVE_TYPES = Set.new([RDF::XSD.boolean, RDF::XSD.integer, RDF::XSD.double]).freeze - ## # If active property has a type mapping in the active context set to @id or @vocab, a JSON object with a single member @id whose value is the result of using the IRI Expansion algorithm on value is returned. # @@ -1491,7 +1489,7 @@ def expand_value(property, value, useNativeTypes: false, rdfDirection: nil, base # If the active property has a type mapping in active context that is @id, return a new JSON object containing a single key-value pair where the key is @id and the value is the result of using the IRI Expansion algorithm, passing active context, value, and true for document relative. if value.is_a?(String) && td.type_mapping == '@id' - #log_debug("") {"as relative IRI: #{value.inspect}"} + # log_debug("") {"as relative IRI: #{value.inspect}"} return {'@id' => expand_iri(value, documentRelative: true, base: base).to_s} end @@ -1500,50 +1498,12 @@ def expand_value(property, value, useNativeTypes: false, rdfDirection: nil, base return {'@id' => expand_iri(value, vocab: true, documentRelative: true, base: base).to_s} end - value = RDF::Literal(value) if - value.is_a?(Date) || - value.is_a?(DateTime) || - value.is_a?(Time) - result = case value when RDF::URI, RDF::Node {'@id' => value.to_s} - when RDF::Literal - res = {} - if value.datatype == RDF::URI(RDF.to_uri + "JSON") && processingMode('json-ld-1.1') - # Value parsed as JSON - # FIXME: MultiJson - res['@type'] = '@json' - res['@value'] = ::JSON.parse(value.object) - elsif value.datatype.start_with?("https://www.w3.org/ns/i18n#") && rdfDirection == 'i18n-datatype' && processingMode('json-ld-1.1') - lang, dir = value.datatype.fragment.split('_') - res['@value'] = value.to_s - unless lang.empty? - if lang !~ /^[a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})*$/ - if options[:validate] - raise JsonLdError::InvalidLanguageMapping, "rdf:language must be valid BCP47: #{lang.inspect}" - else - warn "rdf:language must be valid BCP47: #{lang.inspect}" - end - end - res['@language'] = lang - end - res['@direction'] = dir - elsif useNativeTypes && RDF_LITERAL_NATIVE_TYPES.include?(value.datatype) && value.valid? - res['@type'] = uri(coerce(property)) if coerce(property) - res['@value'] = value.object - else - value.canonicalize! if value.valid? && value.datatype == RDF::XSD.double - if coerce(property) - res['@type'] = uri(coerce(property)).to_s - elsif value.datatype? - res['@type'] = uri(value.datatype).to_s - elsif value.language? 
|| language(property) - res['@language'] = (value.language || language(property)).to_s - end - res['@value'] = value.to_s - end - res + when Date, DateTime, Time + lit = RDF::Literal.new(value) + {'@value' => lit.to_s, '@type' => lit.datatype.to_s} else # Otherwise, initialize result to a JSON object with an @value member whose value is set to value. res = {} @@ -1561,8 +1521,6 @@ def expand_value(property, value, useNativeTypes: false, rdfDirection: nil, base end result - rescue ::JSON::ParserError => e - raise JSON::LD::JsonLdError::InvalidJsonLiteral, e.message end ## @@ -1579,7 +1537,7 @@ def expand_value(property, value, useNativeTypes: false, rdfDirection: nil, base # @see https://www.w3.org/TR/json-ld11-api/#value-compaction # FIXME: revisit the specification version of this. def compact_value(property, value, base: nil) - #log_debug("compact_value") {"property: #{property.inspect}, value: #{value.inspect}"} + # log_debug("compact_value") {"property: #{property.inspect}, value: #{value.inspect}"} indexing = index?(value) && container(property).include?('@index') language = language(property) @@ -1588,25 +1546,25 @@ def compact_value(property, value, base: nil) result = case when coerce(property) == '@id' && value.key?('@id') && (value.keys - %w(@id @index)).empty? # Compact an @id coercion - #log_debug("") {" (@id & coerce)"} + # log_debug("") {" (@id & coerce)"} compact_iri(value['@id'], base: base) when coerce(property) == '@vocab' && value.key?('@id') && (value.keys - %w(@id @index)).empty? # Compact an @id coercion - #log_debug("") {" (@id & coerce & vocab)"} + # log_debug("") {" (@id & coerce & vocab)"} compact_iri(value['@id'], vocab: true) when value.key?('@id') - #log_debug("") {" (@id)"} + # log_debug("") {" (@id)"} # return value as is value when value['@type'] && value['@type'] == coerce(property) # Compact common datatype - #log_debug("") {" (@type & coerce) == #{coerce(property)}"} + # log_debug("") {" (@type & coerce) == #{coerce(property)}"} value['@value'] when coerce(property) == '@none' || value['@type'] # use original expanded value value when !value['@value'].is_a?(String) - #log_debug("") {" (native)"} + # log_debug("") {" (native)"} indexing || !index?(value) ? value['@value'] : value when value['@language'].to_s.downcase == language.to_s.downcase && value['@direction'] == direction # Compact language and direction value['@value'] @@ -1627,7 +1585,7 @@ def compact_value(property, value, base: nil) # If the result is an object, transform keys using any term keyword aliases if result.is_a?(Hash) && result.keys.any? {|k| self.alias(k) != k} - #log_debug("") {" (map to key aliases)"} + # log_debug("") {" (map to key aliases)"} new_element = {} result.each do |k, v| new_element[self.alias(k)] = v @@ -1635,7 +1593,7 @@ def compact_value(property, value, base: nil) result = new_element end - #log_debug("") {"=> #{result.inspect}"} + # log_debug("") {"=> #{result.inspect}"} result end @@ -1731,11 +1689,21 @@ def alias(value) CONTEXT_BASE_FRAG_OR_QUERY = %w(? #).freeze CONTEXT_TYPE_ID_VOCAB = %w(@id @vocab).freeze + ## + # Reads the `@context` from an IO + def load_context(io, **options) + io.rewind + remote_doc = API.loadRemoteDocument(io, **options) + remote_doc.document.is_a?(String) ? 
+ MultiJson.load(remote_doc.document) : + remote_doc.document + end + def uri(value) case value.to_s when /^_:(.*)$/ # Map BlankNodes if a namer is given - #log_debug "uri(bnode)#{value}: #{$1}" + # log_debug "uri(bnode)#{value}: #{$1}" bnode(namer.get_sym($1)) else value = RDF::URI(value) @@ -1854,25 +1822,25 @@ def inverse_context # for the type mapping or language mapping # @return [String] def select_term(iri, containers, type_language, preferred_values) - #log_debug("select_term") { + # log_debug("select_term") { # "iri: #{iri.inspect}, " + # "containers: #{containers.inspect}, " + # "type_language: #{type_language.inspect}, " + # "preferred_values: #{preferred_values.inspect}" #} container_map = inverse_context[iri] - #log_debug(" ") {"container_map: #{container_map.inspect}"} + # log_debug(" ") {"container_map: #{container_map.inspect}"} containers.each do |container| next unless container_map.key?(container) tl_map = container_map[container] value_map = tl_map[type_language] preferred_values.each do |item| next unless value_map.key?(item) - #log_debug("=>") {value_map[item].inspect} + # log_debug("=>") {value_map[item].inspect} return value_map[item] end end - #log_debug("=>") {"nil"} + # log_debug("=>") {"nil"} nil end diff --git a/lib/json/ld/expand.rb b/lib/json/ld/expand.rb index 8805500..d041d5b 100644 --- a/lib/json/ld/expand.rb +++ b/lib/json/ld/expand.rb @@ -29,13 +29,13 @@ module Expand # @return [Array Object}>] def expand(input, active_property, context, framing: false, from_map: false, log_depth: nil) - log_debug("expand", depth: log_depth.to_i) {"input: #{input.inspect}, active_property: #{active_property.inspect}, context: #{context.inspect}"} + # log_debug("expand", depth: log_depth.to_i) {"input: #{input.inspect}, active_property: #{active_property.inspect}, context: #{context.inspect}"} framing = false if active_property == '@default' expanded_active_property = context.expand_iri(active_property, vocab: true, as_string: true, base: @options[:base]) if active_property # Use a term-specific context, if defined, based on the non-type-scoped context. property_scoped_context = context.term_definitions[active_property].context if active_property && context.term_definitions[active_property] - log_debug("expand", depth: log_depth.to_i) {"property_scoped_context: #{property_scoped_context.inspect}"} unless property_scoped_context.nil? + # log_debug("expand", depth: log_depth.to_i) {"property_scoped_context: #{property_scoped_context.inspect}"} unless property_scoped_context.nil? result = case input when Array @@ -76,7 +76,7 @@ def expand(input, active_property, context, !(expanded_key_map.values == ['@id']) # If there's a previous context, the context was type-scoped - log_debug("expand", depth: log_depth.to_i) {"previous_context: #{context.previous_context.inspect}"} if revert_context + # log_debug("expand", depth: log_depth.to_i) {"previous_context: #{context.previous_context.inspect}"} if revert_context context = context.previous_context if revert_context end @@ -84,12 +84,12 @@ def expand(input, active_property, context, unless property_scoped_context.nil? context = context.parse(property_scoped_context, base: @options[:base], override_protected: true) end - log_debug("expand", depth: log_depth.to_i) {"after property_scoped_context: #{context.inspect}"} unless property_scoped_context.nil? + # log_debug("expand", depth: log_depth.to_i) {"after property_scoped_context: #{context.inspect}"} unless property_scoped_context.nil? 
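A sketch of a property-scoped context being applied, as in the parse above (the vocabulary is illustrative):

  doc = JSON.parse %({
    "@context": {
      "ex": "http://example.org/",
      "friend": {"@id": "ex:friend", "@context": {"name": "ex:name"}}
    },
    "friend": {"name": "Alice"}
  })
  JSON::LD::API.expand(doc)
  # => [{"http://example.org/friend" => [{"http://example.org/name" => [{"@value" => "Alice"}]}]}]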
# If element contains the key @context, set active context to the result of the Context Processing algorithm, passing active context and the value of the @context key as local context. if input.key?('@context') context = context.parse(input['@context'], base: @options[:base]) - log_debug("expand", depth: log_depth.to_i) {"context: #{context.inspect}"} + # log_debug("expand", depth: log_depth.to_i) {"context: #{context.inspect}"} end # Set the type-scoped context to the context on input, for use later @@ -107,7 +107,7 @@ def expand(input, active_property, context, Array(input[tk]).sort.each do |term| term_context = type_scoped_context.term_definitions[term].context if type_scoped_context.term_definitions[term] unless term_context.nil? - log_debug("expand", depth: log_depth.to_i) {"term_context[#{term}]: #{term_context.inspect}"} + # log_debug("expand", depth: log_depth.to_i) {"term_context[#{term}]: #{term_context.inspect}"} context = context.parse(term_context, base: @options[:base], propagate: false) end end @@ -121,7 +121,7 @@ def expand(input, active_property, context, type_scoped_context: type_scoped_context, log_depth: log_depth.to_i + 1) - log_debug("output object", depth: log_depth.to_i) {output_object.inspect} + # log_debug("output object", depth: log_depth.to_i) {output_object.inspect} # If result contains the key @value: if value?(output_object) @@ -199,7 +199,7 @@ def expand(input, active_property, context, if (expanded_active_property || '@graph') == '@graph' && (output_object.key?('@value') || output_object.key?('@list') || (output_object.keys - KEY_ID).empty? && !framing) - log_debug(" =>", depth: log_depth.to_i) { "empty top-level: " + output_object.inspect} + # log_debug(" =>", depth: log_depth.to_i) { "empty top-level: " + output_object.inspect} return nil end @@ -219,12 +219,12 @@ def expand(input, active_property, context, base: @options[:base], override_protected: true) end - log_debug("expand", depth: log_depth.to_i) {"property_scoped_context: #{context.inspect}"} unless property_scoped_context.nil? + # log_debug("expand", depth: log_depth.to_i) {"property_scoped_context: #{context.inspect}"} unless property_scoped_context.nil? context.expand_value(active_property, input, base: @options[:base]) end - log_debug(depth: log_depth.to_i) {" => #{result.inspect}"} + # log_debug(depth: log_depth.to_i) {" => #{result.inspect}"} result end @@ -258,10 +258,10 @@ def expand_object(input, active_property, context, output_object, expanded_property.to_s.start_with?("_:") && context.processingMode('json-ld-1.1') - log_debug("expand property", depth: log_depth.to_i) {"ap: #{active_property.inspect}, expanded: #{expanded_property.inspect}, value: #{value.inspect}"} + # log_debug("expand property", depth: log_depth.to_i) {"ap: #{active_property.inspect}, expanded: #{expanded_property.inspect}, value: #{value.inspect}"} if expanded_property.nil? - log_debug(" => ", depth: log_depth.to_i) {"skip nil property"} + # log_debug(" => ", depth: log_depth.to_i) {"skip nil property"} next end @@ -341,7 +341,7 @@ def expand_object(input, active_property, context, output_object, Array(output_object['@included']) + included_result when '@type' # If expanded property is @type and value is neither a string nor an array of strings, an invalid type value error has been detected and processing is aborted. Otherwise, set expanded value to the result of using the IRI Expansion algorithm, passing active context, true for vocab, and true for document relative to expand the value or each of its items. 
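Likewise, a sketch of a type-scoped context picked up while processing @type (illustrative vocabulary):

  doc = JSON.parse %({
    "@context": {
      "ex": "http://example.org/",
      "Person": {"@id": "ex:Person", "@context": {"name": "ex:name"}}
    },
    "@type": "Person",
    "name": "Alice"
  })
  JSON::LD::API.expand(doc)
  # => [{"@type" => ["http://example.org/Person"], "http://example.org/name" => [{"@value" => "Alice"}]}]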
- log_debug("@type", depth: log_depth.to_i) {"value: #{value.inspect}"} + # log_debug("@type", depth: log_depth.to_i) {"value: #{value.inspect}"} e_type = case value when Array value.map do |v| @@ -516,7 +516,7 @@ def expand_object(input, active_property, context, output_object, # If expanded value contains an @reverse member, i.e., properties that are reversed twice, execute for each of its property and item the following steps: if value.key?('@reverse') - log_debug("@reverse", depth: log_depth.to_i) {"double reverse: #{value.inspect}"} + # log_debug("@reverse", depth: log_depth.to_i) {"double reverse: #{value.inspect}"} value['@reverse'].each do |property, item| # If result does not have a property member, create one and set its value to an empty array. # Append item to the value of the property member of result. @@ -566,7 +566,7 @@ def expand_object(input, active_property, context, output_object, end # Unless expanded value is null, set the expanded property member of result to expanded value. - log_debug("expand #{expanded_property}", depth: log_depth.to_i) { expanded_value.inspect} + # log_debug("expand #{expanded_property}", depth: log_depth.to_i) { expanded_value.inspect} output_object[expanded_property] = expanded_value unless expanded_value.nil? && expanded_property == '@value' && input_type != '@json' next end @@ -619,7 +619,7 @@ def expand_object(input, active_property, context, output_object, if id_context.nil? context else - log_debug("expand", depth: log_depth.to_i) {"id_context: #{id_context.inspect}"} + # log_debug("expand", depth: log_depth.to_i) {"id_context: #{id_context.inspect}"} context.parse(id_context, base: @options[:base], propagate: false) end else @@ -632,7 +632,7 @@ def expand_object(input, active_property, context, output_object, # If container mapping in the active context includes @type, and k is a term in the active context having a local context, use that context when expanding values map_context = container_context.term_definitions[k].context if container.include?('@type') && container_context.term_definitions[k] unless map_context.nil? - log_debug("expand", depth: log_depth.to_i) {"map_context: #{map_context.inspect}"} + # log_debug("expand", depth: log_depth.to_i) {"map_context: #{map_context.inspect}"} map_context = container_context.parse(map_context, base: @options[:base], propagate: false) end @@ -688,21 +688,21 @@ def expand_object(input, active_property, context, output_object, # If expanded value is null, ignore key by continuing to the next key from element. if expanded_value.nil? - log_debug(" => skip nil value", depth: log_depth.to_i) + # log_debug(" => skip nil value", depth: log_depth.to_i) next end - log_debug(depth: log_depth.to_i) {" => #{expanded_value.inspect}"} + # log_debug(depth: log_depth.to_i) {" => #{expanded_value.inspect}"} # If the container mapping associated to key in active context is @list and expanded value is not already a list object, convert expanded value to a list object by first setting it to an array containing only expanded value if it is not already an array, and then by setting it to a JSON object containing the key-value pair @list-expanded value. 
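# For example (illustrative): with {"@context": {"prop": {"@id": "http://example.org/prop", "@container": "@list"}}, "prop": "a"},
# the value expands to {"http://example.org/prop" => [{"@list" => [{"@value" => "a"}]}]}.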
if container.first == '@list' && container.length == 1 && !list?(expanded_value) - log_debug(" => ", depth: log_depth.to_i) { "convert #{expanded_value.inspect} to list"} + # log_debug(" => ", depth: log_depth.to_i) { "convert #{expanded_value.inspect} to list"} expanded_value = {'@list' => as_array(expanded_value)} end - log_debug(depth: log_depth.to_i) {" => #{expanded_value.inspect}"} + # log_debug(depth: log_depth.to_i) {" => #{expanded_value.inspect}"} # convert expanded value to @graph if container specifies it if container.first == '@graph' && container.length == 1 - log_debug(" => ", depth: log_depth.to_i) { "convert #{expanded_value.inspect} to list"} + # log_debug(" => ", depth: log_depth.to_i) { "convert #{expanded_value.inspect} to list"} expanded_value = as_array(expanded_value).map do |v| {'@graph' => as_array(v)} end @@ -742,7 +742,7 @@ def expand_object(input, active_property, context, output_object, nest_context = if nest_context.nil? context else - log_debug("expand", depth: log_depth.to_i) {"nest_context: #{nest_context.inspect}"} + # log_debug("expand", depth: log_depth.to_i) {"nest_context: #{nest_context.inspect}"} context.parse(nest_context, base: @options[:base], override_protected: true) end diff --git a/lib/json/ld/from_rdf.rb b/lib/json/ld/from_rdf.rb index e911c23..a40c66c 100644 --- a/lib/json/ld/from_rdf.rb +++ b/lib/json/ld/from_rdf.rb @@ -14,9 +14,11 @@ module FromRDF # @param [Boolean] useRdfType (false) # If set to `true`, the JSON-LD processor will treat `rdf:type` like a normal property instead of using `@type`. # @param [Boolean] useNativeTypes (false) use native representations + # @param extendedRepresentation (false) + # Use the extended internal representation for native types. # # @return [Array] the JSON-LD document in normalized form - def from_statements(dataset, useRdfType: false, useNativeTypes: false) + def from_statements(dataset, useRdfType: false, useNativeTypes: false, extendedRepresentation: false) default_graph = {} graph_map = {'@default' => default_graph} referenced_once = {} @@ -30,7 +32,7 @@ def from_statements(dataset, useRdfType: false, useNativeTypes: false) # For each statement in dataset dataset.each do |statement| - #log_debug("statement") { statement.to_nquads.chomp} + # log_debug("statement") { statement.to_nquads.chomp} name = statement.graph_name ? @context.expand_iri(statement.graph_name, base: @options[:base]).to_s : '@default' @@ -41,9 +43,9 @@ def from_statements(dataset, useRdfType: false, useNativeTypes: false) default_graph[name] ||= {'@id' => name} unless name == '@default' subject = statement.subject.statement? ? - resource_representation(statement.subject, useNativeTypes)['@id'].to_json_c14n : + resource_representation(statement.subject, useNativeTypes, extendedRepresentation)['@id'].to_json_c14n : statement.subject.to_s - node = node_map[subject] ||= resource_representation(statement.subject, useNativeTypes) + node = node_map[subject] ||= resource_representation(statement.subject, useNativeTypes, extendedRepresentation) # If predicate is rdf:datatype, note subject in compound literal subjects map if @options[:rdfDirection] == 'compound-literal' && statement.predicate == RDF.to_uri + 'direction' @@ -53,10 +55,10 @@ def from_statements(dataset, useRdfType: false, useNativeTypes: false) # If object is an IRI, blank node identifier, or statement, and node map does not have an object member, create one and initialize its value to a new JSON object consisting of a single member @id whose value is set to object. 
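A sketch of requesting the extended internal representation added here; native values come back as RDF::Literal objects rather than Ruby natives (this mirrors the spec additions below):

  require 'rdf'
  graph = RDF::Graph.new << RDF::Statement(
    RDF::URI("http://example.org/vocab#id"),
    RDF::URI("http://example.org/vocab#bool"),
    RDF::Literal(true))
  JSON::LD::API.fromRdf(graph, useNativeTypes: true, extendedRepresentation: true)
  # => [{"@id" => "http://example.org/vocab#id",
  #      "http://example.org/vocab#bool" => [{"@value" => RDF::Literal(true)}]}]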
unless statement.object.literal? object = statement.object.statement? ? - resource_representation(statement.object, useNativeTypes)['@id'].to_json_c14n : + resource_representation(statement.object, useNativeTypes, extendedRepresentation)['@id'].to_json_c14n : statement.object.to_s node_map[object] ||= - resource_representation(statement.object, useNativeTypes) + resource_representation(statement.object, useNativeTypes, extendedRepresentation) end # If predicate equals rdf:type, and object is an IRI or blank node identifier, append object to the value of the @type member of node. If no such member exists, create one and initialize it to an array whose only item is object. Finally, continue to the next RDF triple. @@ -66,7 +68,7 @@ def from_statements(dataset, useRdfType: false, useNativeTypes: false) end # Set value to the result of using the RDF to Object Conversion algorithm, passing object, rdfDirection, and use native types. - value = resource_representation(statement.object, useNativeTypes) + value = resource_representation(statement.object, useNativeTypes, extendedRepresentation) merge_value(node, statement.predicate.to_s, value) @@ -124,7 +126,7 @@ def from_statements(dataset, useRdfType: false, useNativeTypes: false) list, list_nodes = [], [] # If property equals rdf:rest, the value associated to the usages member of node has exactly 1 entry, node has a rdf:first and rdf:rest property, both of which have as value an array consisting of a single element, and node has no other members apart from an optional @type member whose value is an array with a single item equal to rdf:List, node represents a well-formed list node. Continue with the following steps: - #log_debug("list element?") {node.to_json(JSON_STATE) rescue 'malformed json'} + # log_debug("list element?") {node.to_json(JSON_STATE) rescue 'malformed json'} while property == RDF.rest.to_s && blank_node?(node) && referenced_once[node['@id']] && @@ -163,31 +165,68 @@ def from_statements(dataset, useRdfType: false, useNativeTypes: false) node.delete(:usages) result << node unless node_reference?(node) end - #log_debug("fromRdf") {result.to_json(JSON_STATE) rescue 'malformed json'} + # log_debug("fromRdf") {result.to_json(JSON_STATE) rescue 'malformed json'} result end private - def resource_representation(resource, useNativeTypes) + + RDF_LITERAL_NATIVE_TYPES = Set.new([RDF::XSD.boolean, RDF::XSD.integer, RDF::XSD.double]).freeze + + def resource_representation(resource, useNativeTypes, extendedRepresentation) case resource when RDF::Statement # Note, if either subject or object are a BNode which is used elsewhere, # this might not work well with the BNode accounting from above. 
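# For example (illustrative): a quoted triple subject <<<http://example.org/s> <http://example.org/p> "o">> is
# represented as {'@id' => {'@id' => 'http://example.org/s', 'http://example.org/p' => [{'@value' => 'o'}]}}.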
- rep = {'@id' => resource_representation(resource.subject, false)} + rep = {'@id' => resource_representation(resource.subject, false, extendedRepresentation)} if resource.predicate == RDF.type rep['@id'].merge!('@type' => resource.object.to_s) else rep['@id'].merge!( resource.predicate.to_s => - as_array(resource_representation(resource.object, useNativeTypes))) + as_array(resource_representation(resource.object, useNativeTypes, extendedRepresentation))) end rep when RDF::Literal - @context.expand_value(nil, - resource, - rdfDirection: @options[:rdfDirection], - useNativeTypes: useNativeTypes, - base: @options[:base]) + base = @options[:base] + rdfDirection = @options[:rdfDirection] + res = {} + + if resource.datatype == RDF::URI(RDF.to_uri + "JSON") && @context.processingMode('json-ld-1.1') + res['@type'] = '@json' + res['@value'] = begin + ::JSON.parse(resource.object) + rescue ::JSON::ParserError => e + raise JSON::LD::JsonLdError::InvalidJsonLiteral, e.message + end + elsif useNativeTypes && extendedRepresentation + res['@value'] = resource # Raw literal + elsif resource.datatype.start_with?("https://www.w3.org/ns/i18n#") && rdfDirection == 'i18n-datatype' && @context.processingMode('json-ld-1.1') + lang, dir = resource.datatype.fragment.split('_') + res['@value'] = resource.to_s + unless lang.empty? + if lang !~ /^[a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})*$/ + if options[:validate] + raise JsonLdError::InvalidLanguageMapping, "rdf:language must be valid BCP47: #{lang.inspect}" + else + warn "rdf:language must be valid BCP47: #{lang.inspect}" + end + end + res['@language'] = lang + end + res['@direction'] = dir + elsif useNativeTypes && RDF_LITERAL_NATIVE_TYPES.include?(resource.datatype) && resource.valid? + res['@value'] = resource.object + else + resource.canonicalize! if resource.valid? && resource.datatype == RDF::XSD.double + if resource.datatype? + res['@type'] = resource.datatype.to_s + elsif resource.language? 
+ res['@language'] = resource.language.to_s + end + res['@value'] = resource.to_s + end + res else {'@id' => resource.to_s} end diff --git a/lib/json/ld/streaming_writer.rb b/lib/json/ld/streaming_writer.rb index 481babe..e3238c9 100644 --- a/lib/json/ld/streaming_writer.rb +++ b/lib/json/ld/streaming_writer.rb @@ -19,7 +19,7 @@ def stream_prologue else Context.parse(@options[:context]) end - #log_debug("prologue") {"context: #{context.inspect}"} + # log_debug("prologue") {"context: #{context.inspect}"} if context @output.puts %({"@context": #{context.serialize['@context'].to_json}, "@graph": [) else @@ -39,7 +39,7 @@ def stream_prologue # # @return [void] `self` def stream_statement(statement) - #log_debug("ss") {"state: #{@state.inspect}, stmt: #{statement}"} + # log_debug("ss") {"state: #{@state.inspect}, stmt: #{statement}"} if @current_graph != statement.graph_name end_graph start_graph(statement.graph_name) @@ -76,7 +76,7 @@ def stream_statement(statement) # Complete open statements # @return [void] `self` def stream_epilogue - #log_debug("epilogue") {"state: #{@state.inspect}"} + # log_debug("epilogue") {"state: #{@state.inspect}"} end_graph if context @output.puts "\n]}" @@ -89,7 +89,7 @@ def stream_epilogue private def start_graph(resource) - #log_debug("start_graph") {"state: #{@state.inspect}, resource: #{resource}"} + # log_debug("start_graph") {"state: #{@state.inspect}, resource: #{resource}"} if resource @output.puts(",") if %i(wrote_node wrote_graph).include?(@state) @output.puts %({"@id": "#{resource}", "@graph": [) @@ -99,7 +99,7 @@ def start_graph(resource) end def end_graph - #log_debug("end_graph") {"state: #{@state.inspect}, ctx: #{@current_graph}"} + # log_debug("end_graph") {"state: #{@state.inspect}, ctx: #{@current_graph}"} end_node if @current_graph @output.write %(]}) @@ -108,7 +108,7 @@ def end_graph end def end_node - #log_debug("end_node") {"state: #{@state.inspect}, node: #{@current_node_def.to_json}"} + # log_debug("end_node") {"state: #{@state.inspect}, node: #{@current_node_def.to_json}"} @output.puts(",") if %i(wrote_node wrote_graph).include?(@state) if @current_node_def node_def = if context diff --git a/lib/json/ld/to_rdf.rb b/lib/json/ld/to_rdf.rb index deb9434..c717292 100644 --- a/lib/json/ld/to_rdf.rb +++ b/lib/json/ld/to_rdf.rb @@ -25,6 +25,8 @@ def item_to_rdf(item, graph_name: nil, quoted: false, &block) datatype = RDF::URI(RDF.to_uri + "JSON") if datatype == '@json' case value + when RDF::Value + return value when TrueClass, FalseClass # If value is true or false, then set value its canonical lexical form as defined in the section Data Round Tripping. If datatype is null, set it to xsd:boolean. 
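With the RDF::Value passthrough added above, a literal held in the extended internal representation can flow back out of toRdf unchanged; a hedged sketch (assumes the extendedRepresentation option is also honored through expansion; the IRIs are illustrative):

  input = [{"@id" => "http://example.org/vocab#id",
            "http://example.org/vocab#bool" => [{"@value" => RDF::Literal(true)}]}]
  JSON::LD::API.toRdf(input, expanded: true, extendedRepresentation: true) do |statement|
    puts statement.to_nquads
  end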
value = value.to_s @@ -88,14 +90,14 @@ def item_to_rdf(item, graph_name: nil, quoted: false, &block) to_enum(:item_to_rdf, item['@id'], quoted: true).to_a.first end - #log_debug("item_to_rdf") {"subject: #{subject.to_ntriples rescue 'malformed rdf'}"} + # log_debug("item_to_rdf") {"subject: #{subject.to_ntriples rescue 'malformed rdf'}"} item.each do |property, values| case property when '@type' # If property is @type, construct triple as an RDF Triple composed of id, rdf:type, and object from values where id and object are represented either as IRIs or Blank Nodes values.each do |v| object = as_resource(v) - #log_debug("item_to_rdf") {"type: #{object.to_ntriples rescue 'malformed rdf'}"} + # log_debug("item_to_rdf") {"type: #{object.to_ntriples rescue 'malformed rdf'}"} yield RDF::Statement(subject, RDF.type, object, graph_name: graph_name, quoted: quoted) end when '@graph' @@ -107,12 +109,12 @@ def item_to_rdf(item, graph_name: nil, quoted: false, &block) raise "Huh?" unless values.is_a?(Hash) values.each do |prop, vv| predicate = as_resource(prop) - #log_debug("item_to_rdf") {"@reverse predicate: #{predicate.to_ntriples rescue 'malformed rdf'}"} + # log_debug("item_to_rdf") {"@reverse predicate: #{predicate.to_ntriples rescue 'malformed rdf'}"} # For each item in values vv.each do |v| # Item is a node definition. Generate object as the result of the Object Conversion algorithm passing item. object = item_to_rdf(v, graph_name: graph_name, &block) - #log_debug("item_to_rdf") {"subject: #{object.to_ntriples rescue 'malformed rdf'}"} + # log_debug("item_to_rdf") {"subject: #{object.to_ntriples rescue 'malformed rdf'}"} # yield subject, predicate, and literal to results. yield RDF::Statement(object, predicate, subject, graph_name: graph_name, quoted: quoted) end @@ -127,12 +129,12 @@ def item_to_rdf(item, graph_name: nil, quoted: false, &block) # Otherwise, property is an IRI or Blank Node identifier # Initialize predicate from property as an IRI or Blank node predicate = as_resource(property) - #log_debug("item_to_rdf") {"predicate: #{predicate.to_ntriples rescue 'malformed rdf'}"} + # log_debug("item_to_rdf") {"predicate: #{predicate.to_ntriples rescue 'malformed rdf'}"} # For each item in values values.each do |v| if list?(v) - #log_debug("item_to_rdf") {"list: #{v.inspect}"} + # log_debug("item_to_rdf") {"list: #{v.inspect}"} # If item is a list object, initialize list_results as an empty array, and object to the result of the List Conversion algorithm, passing the value associated with the @list key from item and list_results. object = parse_list(v['@list'], graph_name: graph_name, &block) @@ -141,7 +143,7 @@ def item_to_rdf(item, graph_name: nil, quoted: false, &block) else # Otherwise, item is a value object or a node definition. Generate object as the result of the Object Conversion algorithm passing item. object = item_to_rdf(v, graph_name: graph_name, &block) - #log_debug("item_to_rdf") {"object: #{object.to_ntriples rescue 'malformed rdf'}"} + # log_debug("item_to_rdf") {"object: #{object.to_ntriples rescue 'malformed rdf'}"} # yield subject, predicate, and literal to results. 
diff --git a/lib/json/ld/writer.rb b/lib/json/ld/writer.rb
index ec681bf..096a1d4 100644
--- a/lib/json/ld/writer.rb
+++ b/lib/json/ld/writer.rb
@@ -298,7 +298,7 @@ def write_epilogue
         stream_epilogue
       else
-        log_debug("writer") { "serialize #{@repo.count} statements, #{@options.inspect}"}
+        # log_debug("writer") { "serialize #{@repo.count} statements, #{@options.inspect}"}
         result = API.fromRdf(@repo, **@options.merge(serializer: nil))

         # Some options may be indicated from accept parameters
@@ -326,11 +326,11 @@ def write_epilogue

         if frame = @options[:frame]
           # Perform framing, if given a frame
-          log_debug("writer") { "frame result"}
+          # log_debug("writer") { "frame result"}
           result = API.frame(result, frame, **@options.merge(serializer: nil))
         elsif context
           # Perform compaction, if we have a context
-          log_debug("writer") { "compact result"}
+          # log_debug("writer") { "compact result"}
           result = API.compact(result, context, **@options.merge(serializer: nil))
         end
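For context on write_epilogue: in the non-streaming branch the accumulated repository is first converted with API.fromRdf, then framed (when a :frame option is present) or compacted (when a context is available). A rough usage sketch of the compaction path; the graph contents and context hash are illustrative, not part of this patch:

    require 'json/ld'

    graph = RDF::Graph.new << RDF::Statement(
      RDF::URI('http://example.org/alice'),
      RDF::URI('http://xmlns.com/foaf/0.1/name'),
      RDF::Literal('Alice')
    )

    # Supplying :context selects the API.compact branch of write_epilogue;
    # a :frame option would select the API.frame branch instead.
    puts JSON::LD::Writer.buffer(context: {'foaf' => 'http://xmlns.com/foaf/0.1/'}) { |writer|
      graph.each_statement { |st| writer << st }
    }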
diff --git a/spec/context_spec.rb b/spec/context_spec.rb
index 1cb9dc5..01bade2 100644
--- a/spec/context_spec.rb
+++ b/spec/context_spec.rb
@@ -655,7 +655,6 @@ def containers
           "@language" => "en"
         }
       }, logger)
-      expect(subject.to_rb).not_to be_empty
     end

     it "@vocab" do
@@ -665,7 +664,6 @@ def containers
           "@vocab" => "http://example.com/"
         }
       }, logger)
-      expect(subject.to_rb).not_to be_empty
     end

     it "term mappings" do
@@ -676,7 +674,6 @@ def containers
           "foo" => "http://example.com/"
         }
       }, logger)
-      expect(c.to_rb).not_to be_empty
     end

     it "@context" do
@@ -931,6 +928,37 @@ def containers
     end
   end

+  describe "#to_rb" do
+    before(:all) {JSON::LD::Context.instance_variable_set(:@cache, nil)}
+    subject {
+      allow(JSON::LD::API).to receive(:documentLoader).with("http://example.com/context", anything).and_yield(remote_doc)
+      context.parse("http://example.com/context")
+    }
+
+    it "encodes as utf-8" do
+      expect(subject.to_rb).to match(/encoding: utf-8/)
+    end
+
+    it "marked as auto-generated" do
+      expect(subject.to_rb).to match(/This file generated automatically from/)
+    end
+
+    it "includes URL in preloaded" do
+      expect(subject.to_rb).to include(%(add_preloaded("http://example.com/context")))
+    end
+
+    it "includes processingMode" do
+      expect(subject.to_rb).to include(%(processingMode: "json-ld-1.1"))
+    end
+
+    it "term mappings" do
+      expect(subject.to_rb).to include(%("avatar" => TermDefinition.new("avatar", id: "http://xmlns.com/foaf/0.1/avatar", type_mapping: "@id")))
+      expect(subject.to_rb).to include(%("homepage" => TermDefinition.new("homepage", id: "http://xmlns.com/foaf/0.1/homepage", type_mapping: "@id")))
+      expect(subject.to_rb).to include(%("name" => TermDefinition.new("name", id: "http://xmlns.com/foaf/0.1/name", simple: true)))
+      expect(subject.to_rb).to include(%("xsd" => TermDefinition.new("xsd", id: "http://www.w3.org/2001/XMLSchema#", simple: true, prefix: true)))
+    end
+  end
+
   describe "#base=" do
     subject {
       context.parse({
@@ -1553,14 +1581,6 @@ def containers
       "native double" =>  ["foo", 1.1e1, {"@value" => 1.1E1}],
       "native date" =>    ["foo", Date.parse("2011-12-27"), {"@value" => "2011-12-27", "@type" => RDF::XSD.date.to_s}],
       "native dateTime" =>["foo", DateTime.parse("2011-12-27T10:11:12Z"), {"@value" => "2011-12-27T10:11:12Z", "@type" => RDF::XSD.dateTime.to_s}],
-      "rdf boolean" =>    ["foo", RDF::Literal(true), {"@value" => "true", "@type" => RDF::XSD.boolean.to_s}],
-      "rdf integer" =>    ["foo", RDF::Literal(1), {"@value" => "1", "@type" => RDF::XSD.integer.to_s}],
-      "rdf decimal" =>    ["foo", RDF::Literal::Decimal.new(1.1), {"@value" => "1.1", "@type" => RDF::XSD.decimal.to_s}],
-      "rdf double" =>     ["foo", RDF::Literal::Double.new(1.1), {"@value" => "1.1E0", "@type" => RDF::XSD.double.to_s}],
-      "rdf URI" =>        ["foo", RDF::URI("foo"), {"@id" => "foo"}],
-      "rdf date " =>      ["foo", RDF::Literal(Date.parse("2011-12-27")), {"@value" => "2011-12-27", "@type" => RDF::XSD.date.to_s}],
-      "rdf nonNeg" =>     ["foo", RDF::Literal::NonNegativeInteger.new(1), {"@value" => "1", "@type" => RDF::XSD.nonNegativeInteger}],
-      "rdf float" =>      ["foo", RDF::Literal::Float.new(1.0), {"@value" => "1.0", "@type" => RDF::XSD.float}],
       "ex:none string" => ["ex:none", "foo", {"@value" => "foo"}],
       "ex:none boolean" =>["ex:none", true, {"@value" => true}],
       "ex:none integer" =>["ex:none", 1, {"@value" => 1}],
diff --git a/spec/frame_spec.rb b/spec/frame_spec.rb
index 1a4deb9..8f3ad7f 100644
--- a/spec/frame_spec.rb
+++ b/spec/frame_spec.rb
@@ -2463,6 +2463,49 @@
         }),
         processingMode: "json-ld-1.1"
       },
+      "issue #142": {
+        input: %({
+          "@context":{
+            "ex":"http://example.org/vocab#",
+            "ex:info":{"@type":"@json"},
+            "ex:other":{"@type":"@json"}
+          },
+          "@id":"http://example.org/test/#library",
+          "@type":"ex:Library",
+          "ex:info":{
+            "author":"JOHN",
+            "pages":200
+          },
+          "ex:other":{
+            "publisher":"JANE"
+          }
+        }),
+        frame: %({
+          "@context":{
+            "ex":"http://example.org/vocab#",
+            "ex:info":{"@type":"@json"},
+            "ex:other":{"@type":"@json"}
+          },
+          "http://example.org/vocab#info":{}
+        }),
+        output: %({
+          "@context": {
+            "ex": "http://example.org/vocab#",
+            "ex:info": {"@type": "@json"},
+            "ex:other": {"@type": "@json"}
+          },
+          "@id": "http://example.org/test/#library",
+          "@type": "ex:Library",
+          "ex:info": {
+            "author": "JOHN",
+            "pages": 200
+          },
+          "ex:other": {
+            "publisher": "JANE"
+          }
+        }),
+        processingMode: "json-ld-1.1"
+      }
     }.each do |title, params|
       it title do
        do_frame(params)
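The new "issue #142" fixture guards against `@json`-typed values being corrupted during framing: both `ex:info` (matched by the frame) and `ex:other` (not matched) must round-trip verbatim. Outside the spec harness the same check looks roughly like this, with data trimmed from the fixture above:

    require 'json'
    require 'json/ld'

    input = JSON.parse %({
      "@context": {"ex": "http://example.org/vocab#", "ex:info": {"@type": "@json"}},
      "@id": "http://example.org/test/#library",
      "@type": "ex:Library",
      "ex:info": {"author": "JOHN", "pages": 200}
    })

    frame = JSON.parse %({
      "@context": {"ex": "http://example.org/vocab#", "ex:info": {"@type": "@json"}},
      "http://example.org/vocab#info": {}
    })

    framed = JSON::LD::API.frame(input, frame, processingMode: 'json-ld-1.1')
    # "ex:info" should come back as the literal JSON object, not an expanded node.
    puts framed.to_json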
diff --git a/spec/from_rdf_spec.rb b/spec/from_rdf_spec.rb
index c509432..ca9148e 100644
--- a/spec/from_rdf_spec.rb
+++ b/spec/from_rdf_spec.rb
@@ -268,6 +268,73 @@
         it(title) {do_fromRdf(processingMode: "json-ld-1.1", **params)}
       end
     end
+
+    context "extendedRepresentation: true" do
+      {
+        "true": {
+          output: [{
+            "@id" => "http://example.org/vocab#id",
+            "http://example.org/vocab#bool" => [{"@value" => RDF::Literal(true)}]
+          }],
+          input: %(
+            @prefix ex: <http://example.org/vocab#> .
+            @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+            ex:id ex:bool true .
+          )
+        },
+        "false": {
+          output: [{
+            "@id" => "http://example.org/vocab#id",
+            "http://example.org/vocab#bool" => [{"@value" => RDF::Literal(false)}]
+          }],
+          input: %(
+            @prefix ex: <http://example.org/vocab#> .
+            @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+            ex:id ex:bool false .
+          )
+        },
+        "double": {
+          output: [{
+            "@id" => "http://example.org/vocab#id",
+            "http://example.org/vocab#double" => [{"@value" => RDF::Literal(1.23E0)}]
+          }],
+          input: %(
+            @prefix ex: <http://example.org/vocab#> .
+            @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+            ex:id ex:double 1.23E0 .
+          )
+        },
+        "double-zero": {
+          output: [{
+            "@id" => "http://example.org/vocab#id",
+            "http://example.org/vocab#double" => [{"@value" => RDF::Literal(0, datatype: RDF::XSD.double)}]
+          }],
+          input: %(
+            @prefix ex: <http://example.org/vocab#> .
+            @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+            ex:id ex:double 0.0E0 .
+          )
+        },
+        "integer": {
+          output: [{
+            "@id" => "http://example.org/vocab#id",
+            "http://example.org/vocab#integer" => [{"@value" => RDF::Literal(123)}]
+          }],
+          input: %(
+            @prefix ex: <http://example.org/vocab#> .
+            @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+            ex:id ex:integer 123 .
+          )
+        },
+      }.each do |title, params|
+        params[:input] = RDF::Graph.new << RDF::Turtle::Reader.new(params[:input])
+        it(title) {
+          do_fromRdf(processingMode: "json-ld-1.1",
+                     useNativeTypes: true,
+                     extendedRepresentation: true,
+                     **params)}
+      end
+    end
   end

   context "anons" do
diff --git a/spec/to_rdf_spec.rb b/spec/to_rdf_spec.rb
index 69b3b90..e5fe1c0 100644
--- a/spec/to_rdf_spec.rb
+++ b/spec/to_rdf_spec.rb
@@ -409,6 +409,139 @@
         end
       end
     end
+
+    context "with xsd: true" do
+      {
+        "true": {
+          input: {
+            "@context" => {
+              "e" => "http://example.org/vocab#e"
+            },
+            "e" => RDF::Literal(true)
+          },
+          output: %(
+            @prefix ex: <http://example.org/vocab#> .
+            @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+            [ex:e true] .
+          )
+        },
+        "integer": {
+          input: {
+            "@context" => {
+              "e" => "http://example.org/vocab#e"
+            },
+            "e" => RDF::Literal(1)
+          },
+          output: %(
+            @prefix ex: <http://example.org/vocab#> .
+            @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+            [ex:e 1] .
+          )
+        },
+        "decimal": {
+          input: {
+            "@context" => {
+              "e" => "http://example.org/vocab#e"
+            },
+            "e" => RDF::Literal::Decimal.new("1.1")
+          },
+          output: %(
+            @prefix ex: <http://example.org/vocab#> .
+            @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+            [ex:e 1.1] .
+          )
+        },
+        "float": {
+          input: {
+            "@context" => {
+              "e" => "http://example.org/vocab#e"
+            },
+            "e" => RDF::Literal.new("1.1e1", datatype: RDF::XSD.float)
+          },
+          output: %(
+            @prefix ex: <http://example.org/vocab#> .
+            @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+            @prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
+            [ex:e "1.1e1"^^xsd:float] .
+          )
+        },
+        "double": {
+          input: {
+            "@context" => {
+              "e" => "http://example.org/vocab#e"
+            },
+            "e" => RDF::Literal.new("1.1e1", datatype: RDF::XSD.double)
+          },
+          output: %(
+            @prefix ex: <http://example.org/vocab#> .
+            @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+            @prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
+            [ex:e 1.1e1] .
+          )
+        },
+        "date": {
+          input: {
+            "@context" => {
+              "e" => "http://example.org/vocab#e"
+            },
+            "e" => RDF::Literal.new("2022-08-27", datatype: RDF::XSD.date)
+          },
+          output: %(
+            @prefix ex: <http://example.org/vocab#> .
+            @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+            @prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
+            [ex:e "2022-08-27"^^xsd:date] .
+          )
+        },
+        "time": {
+          input: {
+            "@context" => {
+              "e" => "http://example.org/vocab#e"
+            },
+            "e" => RDF::Literal.new("12:00:00", datatype: RDF::XSD.time)
+          },
+          output: %(
+            @prefix ex: <http://example.org/vocab#> .
+            @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+            @prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
+            [ex:e "12:00:00"^^xsd:time] .
+          )
+        },
+        "dateTime": {
+          input: {
+            "@context" => {
+              "e" => "http://example.org/vocab#e"
+            },
+            "e" => RDF::Literal.new("2022-08-27T12:00:00", datatype: RDF::XSD.dateTime)
+          },
+          output: %(
+            @prefix ex: <http://example.org/vocab#> .
+            @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+            @prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
+            [ex:e "2022-08-27T12:00:00"^^xsd:dateTime] .
+          )
+        },
+        "language": {
+          input: {
+            "@context" => {
+              "e" => "http://example.org/vocab#e"
+            },
+            "e" => RDF::Literal.new("language", language: :"en-us")
+          },
+          output: %(
+            @prefix ex: <http://example.org/vocab#> .
+            @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+            @prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
+            [ex:e "language"@en-us] .
+          )
+        },
+      }.each do |title, params|
+        it title do
+          params[:output] = RDF::Graph.new << RDF::Turtle::Reader.new(params[:output])
+          run_to_rdf(params.merge(xsd: true))
+        end
+      end
+    end
   end

   context "prefixes" do